mirror of https://github.com/databricks/cli.git

commit 46ec3103a3
Merge remote-tracking branch 'origin/main' into worktree
@@ -0,0 +1,65 @@
package mutator

import (
	"context"
	"net/url"
	"strconv"
	"strings"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

type initializeURLs struct {
}

// InitializeURLs makes sure the URL field of each resource is configured.
// NOTE: since this depends on an extra API call, this mutator adds some extra
// latency. As such, it should only be used when needed.
// This URL field is used for the output of the 'bundle summary' CLI command.
func InitializeURLs() bundle.Mutator {
	return &initializeURLs{}
}

func (m *initializeURLs) Name() string {
	return "InitializeURLs"
}

func (m *initializeURLs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	workspaceId, err := b.WorkspaceClient().CurrentWorkspaceID(ctx)
	if err != nil {
		return diag.FromErr(err)
	}
	orgId := strconv.FormatInt(workspaceId, 10)
	host := b.WorkspaceClient().Config.CanonicalHostName()
	initializeForWorkspace(b, orgId, host)
	return nil
}

func initializeForWorkspace(b *bundle.Bundle, orgId string, host string) error {
	baseURL, err := url.Parse(host)
	if err != nil {
		return err
	}

	// Add ?o=<workspace id> only if <workspace id> wasn't in the subdomain already.
	// The ?o= is needed when vanity URLs / legacy workspace URLs are used.
	// If it's not needed we prefer to leave it out since these URLs are rather
	// long for most terminals.
	//
	// See https://docs.databricks.com/en/workspace/workspace-details.html for
	// further reading about the '?o=' suffix.
	if !strings.Contains(baseURL.Hostname(), orgId) {
		values := baseURL.Query()
		values.Add("o", orgId)
		baseURL.RawQuery = values.Encode()
	}

	for _, group := range b.Config.Resources.AllResources() {
		for _, r := range group.Resources {
			r.InitializeURL(*baseURL)
		}
	}

	return nil
}
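The following standalone sketch is not part of the commit; it only illustrates how the '?o=' handling above plays out for the two host shapes exercised by the tests below: a vanity/legacy host, where the workspace ID must be appended as a query parameter, and a host that already carries the workspace ID in its subdomain. The host names and IDs are made up, and appendOrgID is a hypothetical helper that mirrors the body of initializeForWorkspace.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// appendOrgID mirrors the query-string logic of initializeForWorkspace:
// add ?o=<workspace id> only when the ID is not already part of the hostname.
func appendOrgID(host, orgId string) string {
	baseURL, err := url.Parse(host)
	if err != nil {
		panic(err)
	}
	if !strings.Contains(baseURL.Hostname(), orgId) {
		values := baseURL.Query()
		values.Add("o", orgId)
		baseURL.RawQuery = values.Encode()
	}
	// Pretend we are building the URL for job ID 1, like Job.InitializeURL does.
	baseURL.Path = "jobs/1"
	return baseURL.String()
}

func main() {
	// Vanity host: the ID is not in the subdomain, so ?o= is appended.
	fmt.Println(appendOrgID("https://mycompany.databricks.com/", "123456"))
	// Azure-style host: the ID is already in the subdomain, so the URL stays short.
	fmt.Println(appendOrgID("https://adb-123456.azuredatabricks.net/", "123456"))
}

Run as written, this prints https://mycompany.databricks.com/jobs/1?o=123456 and https://adb-123456.azuredatabricks.net/jobs/1, matching the expectations in the tests that follow.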
@@ -0,0 +1,130 @@
package mutator

import (
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/databricks/databricks-sdk-go/service/ml"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/databricks/databricks-sdk-go/service/serving"
	"github.com/stretchr/testify/require"
)

func TestInitializeURLs(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Workspace: config.Workspace{
				Host: "https://mycompany.databricks.com/",
			},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {
						ID: "1",
						JobSettings: &jobs.JobSettings{Name: "job1"},
					},
				},
				Pipelines: map[string]*resources.Pipeline{
					"pipeline1": {
						ID: "3",
						PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"},
					},
				},
				Experiments: map[string]*resources.MlflowExperiment{
					"experiment1": {
						ID: "4",
						Experiment: &ml.Experiment{Name: "experiment1"},
					},
				},
				Models: map[string]*resources.MlflowModel{
					"model1": {
						ID: "a model uses its name for identifier",
						Model: &ml.Model{Name: "a model uses its name for identifier"},
					},
				},
				ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
					"servingendpoint1": {
						ID: "my_serving_endpoint",
						CreateServingEndpoint: &serving.CreateServingEndpoint{
							Name: "my_serving_endpoint",
						},
					},
				},
				RegisteredModels: map[string]*resources.RegisteredModel{
					"registeredmodel1": {
						ID: "8",
						CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
							Name: "my_registered_model",
						},
					},
				},
				QualityMonitors: map[string]*resources.QualityMonitor{
					"qualityMonitor1": {
						CreateMonitor: &catalog.CreateMonitor{
							TableName: "catalog.schema.qualityMonitor1",
						},
					},
				},
				Schemas: map[string]*resources.Schema{
					"schema1": {
						ID: "catalog.schema",
						CreateSchema: &catalog.CreateSchema{
							Name: "schema",
						},
					},
				},
				Clusters: map[string]*resources.Cluster{
					"cluster1": {
						ID: "1017-103929-vlr7jzcf",
						ClusterSpec: &compute.ClusterSpec{
							ClusterName: "cluster1",
						},
					},
				},
			},
		},
	}

	expectedURLs := map[string]string{
		"job1":             "https://mycompany.databricks.com/jobs/1?o=123456",
		"pipeline1":        "https://mycompany.databricks.com/pipelines/3?o=123456",
		"experiment1":      "https://mycompany.databricks.com/ml/experiments/4?o=123456",
		"model1":           "https://mycompany.databricks.com/ml/models/a%20model%20uses%20its%20name%20for%20identifier?o=123456",
		"servingendpoint1": "https://mycompany.databricks.com/ml/endpoints/my_serving_endpoint?o=123456",
		"registeredmodel1": "https://mycompany.databricks.com/explore/data/models/8?o=123456",
		"qualityMonitor1":  "https://mycompany.databricks.com/explore/data/catalog/schema/qualityMonitor1?o=123456",
		"schema1":          "https://mycompany.databricks.com/explore/data/catalog/schema?o=123456",
		"cluster1":         "https://mycompany.databricks.com/compute/clusters/1017-103929-vlr7jzcf?o=123456",
	}

	initializeForWorkspace(b, "123456", "https://mycompany.databricks.com/")

	for _, group := range b.Config.Resources.AllResources() {
		for key, r := range group.Resources {
			require.Equal(t, expectedURLs[key], r.GetURL(), "Unexpected URL for "+key)
		}
	}
}

func TestInitializeURLsWithoutOrgId(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {
						ID: "1",
						JobSettings: &jobs.JobSettings{Name: "job1"},
					},
				},
			},
		},
	}

	initializeForWorkspace(b, "123456", "https://adb-123456.azuredatabricks.net/")

	require.Equal(t, "https://adb-123456.azuredatabricks.net/jobs/1", b.Config.Resources.Jobs["job1"].URL)
}
@@ -3,6 +3,7 @@ package config
import (
	"context"
	"fmt"
+	"net/url"

	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go"

@@ -30,6 +31,53 @@ type ConfigResource interface {
	// Terraform equivalent name of the resource. For example "databricks_job"
	// for jobs and "databricks_pipeline" for pipelines.
	TerraformResourceName() string
+
+	// GetName returns the in-product name of the resource.
+	GetName() string
+
+	// GetURL returns the URL of the resource.
+	GetURL() string
+
+	// InitializeURL initializes the URL field of the resource.
+	InitializeURL(baseURL url.URL)
}
+
+// ResourceGroup represents a group of resources of the same type.
+// It includes a description of the resource type and a map of resources.
+type ResourceGroup struct {
+	Description ResourceDescription
+	Resources map[string]ConfigResource
+}
+
+// collectResourceMap collects resources of a specific type into a ResourceGroup.
+func collectResourceMap[T ConfigResource](
+	description ResourceDescription,
+	input map[string]T,
+) ResourceGroup {
+	resources := make(map[string]ConfigResource)
+	for key, resource := range input {
+		resources[key] = resource
+	}
+	return ResourceGroup{
+		Description: description,
+		Resources: resources,
+	}
+}
+
+// AllResources returns all resources in the bundle grouped by their resource type.
+func (r *Resources) AllResources() []ResourceGroup {
+	descriptions := SupportedResources()
+	return []ResourceGroup{
+		collectResourceMap(descriptions["jobs"], r.Jobs),
+		collectResourceMap(descriptions["pipelines"], r.Pipelines),
+		collectResourceMap(descriptions["models"], r.Models),
+		collectResourceMap(descriptions["experiments"], r.Experiments),
+		collectResourceMap(descriptions["model_serving_endpoints"], r.ModelServingEndpoints),
+		collectResourceMap(descriptions["registered_models"], r.RegisteredModels),
+		collectResourceMap(descriptions["quality_monitors"], r.QualityMonitors),
+		collectResourceMap(descriptions["schemas"], r.Schemas),
+		collectResourceMap(descriptions["clusters"], r.Clusters),
+	}
+}

func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) {

@@ -61,20 +109,71 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
}

type ResourceDescription struct {
+	// Singular and plural name when used to refer to the configuration.
	SingularName string
+	PluralName string
+
+	// Singular and plural title when used in summaries / terminal UI.
+	SingularTitle string
+	PluralTitle string
}

// The keys of the map corresponds to the resource key in the bundle configuration.
func SupportedResources() map[string]ResourceDescription {
	return map[string]ResourceDescription{
-		"jobs": {SingularName: "job"},
-		"pipelines": {SingularName: "pipeline"},
-		"models": {SingularName: "model"},
-		"experiments": {SingularName: "experiment"},
-		"model_serving_endpoints": {SingularName: "model_serving_endpoint"},
-		"registered_models": {SingularName: "registered_model"},
-		"quality_monitors": {SingularName: "quality_monitor"},
-		"schemas": {SingularName: "schema"},
-		"clusters": {SingularName: "cluster"},
+		"jobs": {
+			SingularName: "job",
+			PluralName: "jobs",
+			SingularTitle: "Job",
+			PluralTitle: "Jobs",
+		},
+		"pipelines": {
+			SingularName: "pipeline",
+			PluralName: "pipelines",
+			SingularTitle: "Pipeline",
+			PluralTitle: "Pipelines",
+		},
+		"models": {
+			SingularName: "model",
+			PluralName: "models",
+			SingularTitle: "Model",
+			PluralTitle: "Models",
+		},
+		"experiments": {
+			SingularName: "experiment",
+			PluralName: "experiments",
+			SingularTitle: "Experiment",
+			PluralTitle: "Experiments",
+		},
+		"model_serving_endpoints": {
+			SingularName: "model_serving_endpoint",
+			PluralName: "model_serving_endpoints",
+			SingularTitle: "Model Serving Endpoint",
+			PluralTitle: "Model Serving Endpoints",
+		},
+		"registered_models": {
+			SingularName: "registered_model",
+			PluralName: "registered_models",
+			SingularTitle: "Registered Model",
+			PluralTitle: "Registered Models",
+		},
+		"quality_monitors": {
+			SingularName: "quality_monitor",
+			PluralName: "quality_monitors",
+			SingularTitle: "Quality Monitor",
+			PluralTitle: "Quality Monitors",
+		},
+		"schemas": {
+			SingularName: "schema",
+			PluralName: "schemas",
+			SingularTitle: "Schema",
+			PluralTitle: "Schemas",
+		},
+		"clusters": {
+			SingularName: "cluster",
+			PluralName: "clusters",
+			SingularTitle: "Cluster",
+			PluralTitle: "Clusters",
+		},
	}
}
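As a rough usage sketch (not part of the commit; the job data is made up), the new AllResources and ResourceDescription plumbing can be consumed like this to walk every resource generically instead of switching on concrete resource types. This is the same iteration pattern the initialize_urls mutator above and the summary renderer below rely on.

package main

import (
	"fmt"

	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/jobs"
)

func main() {
	// A made-up Resources value with a single job, just enough to exercise AllResources().
	r := config.Resources{
		Jobs: map[string]*resources.Job{
			"job1": {ID: "1", JobSettings: &jobs.JobSettings{Name: "job1"}},
		},
	}

	for _, group := range r.AllResources() {
		// Skip empty groups; only "jobs" has an entry in this sketch.
		if len(group.Resources) == 0 {
			continue
		}
		fmt.Println(group.Description.PluralTitle)
		for key, res := range group.Resources {
			fmt.Printf("  %s: %s\n", key, res.GetName())
		}
	}
}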
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -13,6 +15,7 @@ type Cluster struct {
	ID string `json:"id,omitempty" bundle:"readonly"`
	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`

	*compute.ClusterSpec
}

@@ -37,3 +40,19 @@ func (s *Cluster) Exists(ctx context.Context, w *databricks.WorkspaceClient, id
func (s *Cluster) TerraformResourceName() string {
	return "databricks_cluster"
}
+
+func (s *Cluster) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("compute/clusters/%s", s.ID)
+	s.URL = baseURL.String()
+}
+
+func (s *Cluster) GetName() string {
+	return s.ClusterName
+}
+
+func (s *Cluster) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"
	"strconv"

	"github.com/databricks/cli/libs/log"

@@ -14,6 +16,7 @@ type Job struct {
	ID string `json:"id,omitempty" bundle:"readonly"`
	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`

	*jobs.JobSettings
}

@@ -44,3 +47,19 @@ func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id stri
func (j *Job) TerraformResourceName() string {
	return "databricks_job"
}
+
+func (j *Job) InitializeURL(baseURL url.URL) {
+	if j.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("jobs/%s", j.ID)
+	j.URL = baseURL.String()
+}
+
+func (j *Job) GetName() string {
+	return j.Name
+}
+
+func (j *Job) GetURL() string {
+	return j.URL
+}
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -13,6 +15,7 @@ type MlflowExperiment struct {
	ID string `json:"id,omitempty" bundle:"readonly"`
	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`

	*ml.Experiment
}

@@ -39,3 +42,19 @@ func (s *MlflowExperiment) Exists(ctx context.Context, w *databricks.WorkspaceCl
func (s *MlflowExperiment) TerraformResourceName() string {
	return "databricks_mlflow_experiment"
}
+
+func (s *MlflowExperiment) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("ml/experiments/%s", s.ID)
+	s.URL = baseURL.String()
+}
+
+func (s *MlflowExperiment) GetName() string {
+	return s.Name
+}
+
+func (s *MlflowExperiment) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -13,6 +15,7 @@ type MlflowModel struct {
	ID string `json:"id,omitempty" bundle:"readonly"`
	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`

	*ml.Model
}

@@ -39,3 +42,19 @@ func (s *MlflowModel) Exists(ctx context.Context, w *databricks.WorkspaceClient,
func (s *MlflowModel) TerraformResourceName() string {
	return "databricks_mlflow_model"
}
+
+func (s *MlflowModel) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("ml/models/%s", s.ID)
+	s.URL = baseURL.String()
+}
+
+func (s *MlflowModel) GetName() string {
+	return s.Name
+}
+
+func (s *MlflowModel) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -23,6 +25,7 @@ type ModelServingEndpoint struct {
	Permissions []Permission `json:"permissions,omitempty"`

	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`
}

func (s *ModelServingEndpoint) UnmarshalJSON(b []byte) error {

@@ -47,3 +50,19 @@ func (s *ModelServingEndpoint) Exists(ctx context.Context, w *databricks.Workspa
func (s *ModelServingEndpoint) TerraformResourceName() string {
	return "databricks_model_serving"
}
+
+func (s *ModelServingEndpoint) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("ml/endpoints/%s", s.ID)
+	s.URL = baseURL.String()
+}
+
+func (s *ModelServingEndpoint) GetName() string {
+	return s.Name
+}
+
+func (s *ModelServingEndpoint) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,8 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -13,6 +15,7 @@ type Pipeline struct {
	ID string `json:"id,omitempty" bundle:"readonly"`
	Permissions []Permission `json:"permissions,omitempty"`
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`

	*pipelines.PipelineSpec
}

@@ -39,3 +42,19 @@ func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id
func (p *Pipeline) TerraformResourceName() string {
	return "databricks_pipeline"
}
+
+func (p *Pipeline) InitializeURL(baseURL url.URL) {
+	if p.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("pipelines/%s", p.ID)
+	p.URL = baseURL.String()
+}
+
+func (p *Pipeline) GetName() string {
+	return p.Name
+}
+
+func (s *Pipeline) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,9 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"
+	"strings"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -20,6 +23,7 @@ type QualityMonitor struct {
	ID string `json:"id,omitempty" bundle:"readonly"`

	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`
}

func (s *QualityMonitor) UnmarshalJSON(b []byte) error {

@@ -44,3 +48,19 @@ func (s *QualityMonitor) Exists(ctx context.Context, w *databricks.WorkspaceClie
func (s *QualityMonitor) TerraformResourceName() string {
	return "databricks_quality_monitor"
}
+
+func (s *QualityMonitor) InitializeURL(baseURL url.URL) {
+	if s.TableName == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.TableName, ".", "/"))
+	s.URL = baseURL.String()
+}
+
+func (s *QualityMonitor) GetName() string {
+	return s.TableName
+}
+
+func (s *QualityMonitor) GetURL() string {
+	return s.URL
+}
@@ -2,6 +2,9 @@ package resources

import (
	"context"
+	"fmt"
+	"net/url"
+	"strings"

	"github.com/databricks/cli/libs/log"
	"github.com/databricks/databricks-sdk-go"

@@ -24,6 +27,7 @@ type RegisteredModel struct {
	*catalog.CreateRegisteredModelRequest

	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`
}

func (s *RegisteredModel) UnmarshalJSON(b []byte) error {

@@ -48,3 +52,19 @@ func (s *RegisteredModel) Exists(ctx context.Context, w *databricks.WorkspaceCli
func (s *RegisteredModel) TerraformResourceName() string {
	return "databricks_registered_model"
}
+
+func (s *RegisteredModel) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("explore/data/models/%s", strings.ReplaceAll(s.ID, ".", "/"))
+	s.URL = baseURL.String()
+}
+
+func (s *RegisteredModel) GetName() string {
+	return s.Name
+}
+
+func (s *RegisteredModel) GetURL() string {
+	return s.URL
+}
@@ -1,6 +1,12 @@
package resources

import (
+	"context"
+	"fmt"
+	"net/url"
+	"strings"
+
+	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/marshal"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

@@ -16,6 +22,31 @@ type Schema struct {
	*catalog.CreateSchema

	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
+	URL string `json:"url,omitempty" bundle:"internal"`
+}
+
+func (s *Schema) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
+	return false, fmt.Errorf("schema.Exists() is not supported")
+}
+
+func (s *Schema) TerraformResourceName() string {
+	return "databricks_schema"
+}
+
+func (s *Schema) InitializeURL(baseURL url.URL) {
+	if s.ID == "" {
+		return
+	}
+	baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.ID, ".", "/"))
+	s.URL = baseURL.String()
+}
+
+func (s *Schema) GetURL() string {
+	return s.URL
+}
+
+func (s *Schema) GetName() string {
+	return s.Name
}

func (s *Schema) UnmarshalJSON(b []byte) error {
@@ -63,17 +63,37 @@ func TestCustomMarshallerIsImplemented(t *testing.T) {
	}
}

+func TestResourcesAllResourcesCompleteness(t *testing.T) {
+	r := Resources{}
+	rt := reflect.TypeOf(r)
+
+	// Collect the set of included resource types.
+	var types []string
+	for _, group := range r.AllResources() {
+		types = append(types, group.Description.PluralName)
+	}
+
+	for i := 0; i < rt.NumField(); i++ {
+		field := rt.Field(i)
+		jsonTag := field.Tag.Get("json")
+
+		if idx := strings.Index(jsonTag, ","); idx != -1 {
+			jsonTag = jsonTag[:idx]
+		}
+
+		assert.Contains(t, types, jsonTag, "Field %s is missing in AllResources", field.Name)
+	}
+}
+
func TestSupportedResources(t *testing.T) {
-	expected := map[string]ResourceDescription{}
+	// Please add your resource to the SupportedResources() function in resources.go if you add a new resource.
+	actual := SupportedResources()
+
	typ := reflect.TypeOf(Resources{})
	for i := 0; i < typ.NumField(); i++ {
		field := typ.Field(i)
		jsonTags := strings.Split(field.Tag.Get("json"), ",")
-		singularName := strings.TrimSuffix(jsonTags[0], "s")
-		expected[jsonTags[0]] = ResourceDescription{SingularName: singularName}
+		pluralName := jsonTags[0]
+		assert.Equal(t, actual[pluralName].PluralName, pluralName)
	}
-
-	// Please add your resource to the SupportedResources() function in resources.go
-	// if you are adding a new resource.
-	assert.Equal(t, expected, SupportedResources())
}
@@ -1,9 +1,11 @@
package render

import (
+	"context"
	"fmt"
	"io"
	"path/filepath"
+	"sort"
	"strings"
	"text/template"

@@ -29,7 +31,7 @@ var renderFuncMap = template.FuncMap{
	},
}

-const summaryTemplate = `{{- if .Name -}}
+const summaryHeaderTemplate = `{{- if .Name -}}
Name: {{ .Name | bold }}
{{- if .Target }}
Target: {{ .Target | bold }}

@@ -46,12 +48,30 @@ Workspace:
  Path: {{ .Path | bold }}
{{- end }}
{{- end }}
+{{ end -}}`
+
+const resourcesTemplate = `Resources:
+{{- range . }}
+  {{ .GroupName }}:
+  {{- range .Resources }}
+    {{ .Key | bold }}:
+      Name: {{ .Name }}
+      URL: {{ if .URL }}{{ .URL | cyan }}{{ else }}{{ "(not deployed)" | cyan }}{{ end }}
+  {{- end }}
+{{- end }}
-{{ end -}}
-{{ .Trailer }}
`

+type ResourceGroup struct {
+	GroupName string
+	Resources []ResourceInfo
+}
+
+type ResourceInfo struct {
+	Key string
+	Name string
+	URL string
+}
+
func pluralize(n int, singular, plural string) string {
	if n == 1 {
		return fmt.Sprintf("%d %s", n, singular)

@@ -74,20 +94,20 @@ func buildTrailer(diags diag.Diagnostics) string {
	case len(parts) >= 3:
		first := strings.Join(parts[:len(parts)-1], ", ")
		last := parts[len(parts)-1]
-		return fmt.Sprintf("Found %s, and %s", first, last)
+		return fmt.Sprintf("Found %s, and %s\n", first, last)
	case len(parts) == 2:
-		return fmt.Sprintf("Found %s and %s", parts[0], parts[1])
+		return fmt.Sprintf("Found %s and %s\n", parts[0], parts[1])
	case len(parts) == 1:
-		return fmt.Sprintf("Found %s", parts[0])
+		return fmt.Sprintf("Found %s\n", parts[0])
	default:
		// No diagnostics to print.
-		return color.GreenString("Validation OK!")
+		return color.GreenString("Validation OK!\n")
	}
}

-func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
+func renderSummaryHeaderTemplate(out io.Writer, b *bundle.Bundle) error {
	if b == nil {
-		return renderSummaryTemplate(out, &bundle.Bundle{}, diags)
+		return renderSummaryHeaderTemplate(out, &bundle.Bundle{})
	}

	var currentUser = &iam.User{}

@@ -98,20 +118,19 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti
		}
	}

-	t := template.Must(template.New("summary").Funcs(renderFuncMap).Parse(summaryTemplate))
+	t := template.Must(template.New("summary").Funcs(renderFuncMap).Parse(summaryHeaderTemplate))
	err := t.Execute(out, map[string]any{
		"Name": b.Config.Bundle.Name,
		"Target": b.Config.Bundle.Target,
		"User": currentUser.UserName,
		"Path": b.Config.Workspace.RootPath,
		"Host": b.Config.Workspace.Host,
-		"Trailer": buildTrailer(diags),
	})

	return err
}

-func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
+func renderDiagnosticsOnly(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
	for _, d := range diags {
		for i := range d.Locations {
			if b == nil {

@@ -139,19 +158,73 @@ type RenderOptions struct {
	RenderSummaryTable bool
}

-// RenderTextOutput renders the diagnostics in a human-readable format.
-func RenderTextOutput(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics, opts RenderOptions) error {
-	err := renderDiagnostics(out, b, diags)
+// RenderDiagnostics renders the diagnostics in a human-readable format.
+func RenderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics, opts RenderOptions) error {
+	err := renderDiagnosticsOnly(out, b, diags)
	if err != nil {
		return fmt.Errorf("failed to render diagnostics: %w", err)
	}

	if opts.RenderSummaryTable {
-		err = renderSummaryTemplate(out, b, diags)
-		if err != nil {
-			return fmt.Errorf("failed to render summary: %w", err)
+		if b != nil {
+			err = renderSummaryHeaderTemplate(out, b)
+			if err != nil {
+				return fmt.Errorf("failed to render summary: %w", err)
+			}
+			io.WriteString(out, "\n")
		}
+		trailer := buildTrailer(diags)
+		io.WriteString(out, trailer)
	}

	return nil
}
+
+func RenderSummary(ctx context.Context, out io.Writer, b *bundle.Bundle) error {
+	if err := renderSummaryHeaderTemplate(out, b); err != nil {
+		return err
+	}
+
+	var resourceGroups []ResourceGroup
+
+	for _, group := range b.Config.Resources.AllResources() {
+		resources := make([]ResourceInfo, 0, len(group.Resources))
+		for key, resource := range group.Resources {
+			resources = append(resources, ResourceInfo{
+				Key: key,
+				Name: resource.GetName(),
+				URL: resource.GetURL(),
+			})
+		}
+
+		if len(resources) > 0 {
+			resourceGroups = append(resourceGroups, ResourceGroup{
+				GroupName: group.Description.PluralTitle,
+				Resources: resources,
+			})
+		}
+	}
+
+	if err := renderResourcesTemplate(out, resourceGroups); err != nil {
+		return fmt.Errorf("failed to render resources template: %w", err)
+	}
+
+	return nil
+}
+
+// Helper function to sort and render resource groups using the template
+func renderResourcesTemplate(out io.Writer, resourceGroups []ResourceGroup) error {
+	// Sort everything to ensure consistent output
+	sort.Slice(resourceGroups, func(i, j int) bool {
+		return resourceGroups[i].GroupName < resourceGroups[j].GroupName
+	})
+	for _, group := range resourceGroups {
+		sort.Slice(group.Resources, func(i, j int) bool {
+			return group.Resources[i].Key < group.Resources[j].Key
+		})
+	}

+	t := template.Must(template.New("resources").Funcs(renderFuncMap).Parse(resourcesTemplate))
+
+	return t.Execute(out, resourceGroups)
+}
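A minimal caller-side sketch of the new RenderSummary entry point (not part of the commit; the bundle literal is made up, and a real invocation goes through the 'bundle summary' command, which loads Terraform state and runs mutator.InitializeURLs first). The full expected text layout is pinned down by TestRenderSummary in the test file below.

package main

import (
	"context"
	"os"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/render"
	"github.com/databricks/databricks-sdk-go/service/jobs"
)

func main() {
	// A made-up bundle with one deployed job whose URL was already initialized.
	b := &bundle.Bundle{
		Config: config.Root{
			Bundle: config.Bundle{Name: "example", Target: "dev"},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {
						ID: "1",
						URL: "https://mycompany.databricks.com/jobs/1?o=123456",
						JobSettings: &jobs.JobSettings{Name: "job1"},
					},
				},
			},
		},
	}

	// Writes the summary header followed by the grouped resource listing.
	if err := render.RenderSummary(context.Background(), os.Stdout, b); err != nil {
		panic(err)
	}
}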
@@ -2,14 +2,21 @@ package render

import (
	"bytes"
+	"context"
+	"io"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
-	assert "github.com/databricks/cli/libs/dyn/dynassert"
+	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/databricks/databricks-sdk-go/service/pipelines"
+	"github.com/databricks/databricks-sdk-go/service/serving"
+	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

@@ -326,7 +333,7 @@ func TestRenderTextOutput(t *testing.T) {
		t.Run(tc.name, func(t *testing.T) {
			writer := &bytes.Buffer{}

-			err := RenderTextOutput(writer, tc.bundle, tc.diags, tc.opts)
+			err := RenderDiagnostics(writer, tc.bundle, tc.diags, tc.opts)
			require.NoError(t, err)

			assert.Equal(t, tc.expected, writer.String())

@@ -468,7 +475,7 @@ func TestRenderDiagnostics(t *testing.T) {
		t.Run(tc.name, func(t *testing.T) {
			writer := &bytes.Buffer{}

-			err := renderDiagnostics(writer, bundle, tc.diags)
+			err := renderDiagnosticsOnly(writer, bundle, tc.diags)
			require.NoError(t, err)

			assert.Equal(t, tc.expected, writer.String())

@@ -479,8 +486,105 @@ func TestRenderDiagnostics(t *testing.T) {
func TestRenderSummaryTemplate_nilBundle(t *testing.T) {
	writer := &bytes.Buffer{}

-	err := renderSummaryTemplate(writer, nil, nil)
+	err := renderSummaryHeaderTemplate(writer, nil)
	require.NoError(t, err)

+	io.WriteString(writer, buildTrailer(nil))
+
	assert.Equal(t, "Validation OK!\n", writer.String())
}
+
+func TestRenderSummary(t *testing.T) {
+	ctx := context.Background()
+
+	// Create a mock bundle with various resources
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Bundle: config.Bundle{
+				Name: "test-bundle",
+				Target: "test-target",
+			},
+			Workspace: config.Workspace{
+				Host: "https://mycompany.databricks.com/",
+			},
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"job1": {
+						ID: "1",
+						URL: "https://url1",
+						JobSettings: &jobs.JobSettings{Name: "job1-name"},
+					},
+					"job2": {
+						ID: "2",
+						URL: "https://url2",
+						JobSettings: &jobs.JobSettings{Name: "job2-name"},
+					},
+				},
+				Pipelines: map[string]*resources.Pipeline{
+					"pipeline2": {
+						ID: "4",
+						// no URL
+						PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline2-name"},
+					},
+					"pipeline1": {
+						ID: "3",
+						URL: "https://url3",
+						PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1-name"},
+					},
+				},
+				Schemas: map[string]*resources.Schema{
+					"schema1": {
+						ID: "catalog.schema",
+						CreateSchema: &catalog.CreateSchema{
+							Name: "schema",
+						},
+						// no URL
+					},
+				},
+				ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
+					"endpoint1": {
+						ID: "7",
+						CreateServingEndpoint: &serving.CreateServingEndpoint{
+							Name: "my_serving_endpoint",
+						},
+						URL: "https://url4",
+					},
+				},
+			},
+		},
+	}
+
+	writer := &bytes.Buffer{}
+	err := RenderSummary(ctx, writer, b)
+	require.NoError(t, err)
+
+	expectedSummary := `Name: test-bundle
+Target: test-target
+Workspace:
+  Host: https://mycompany.databricks.com/
+Resources:
+  Jobs:
+    job1:
+      Name: job1-name
+      URL: https://url1
+    job2:
+      Name: job2-name
+      URL: https://url2
+  Model Serving Endpoints:
+    endpoint1:
+      Name: my_serving_endpoint
+      URL: https://url4
+  Pipelines:
+    pipeline1:
+      Name: pipeline1-name
+      URL: https://url3
+    pipeline2:
+      Name: pipeline2-name
+      URL: (not deployed)
+  Schemas:
+    schema1:
+      Name: schema
+      URL: (not deployed)
+`
+	assert.Equal(t, expectedSummary, writer.String())
+}
@@ -0,0 +1,33 @@
bundle:
  name: issue_1828

variables:
  # One entry for each of the underlying YAML (or [dyn.Kind]) types.
  # The test confirms we can convert to and from the typed configuration without losing information.

  map:
    default:
      foo: bar

  sequence:
    default:
      - foo
      - bar

  string:
    default: foo

  bool:
    default: true

  int:
    default: 42

  float:
    default: 3.14

  time:
    default: 2021-01-01

  nil:
    default:
@@ -0,0 +1,48 @@
package config_tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestIssue1828(t *testing.T) {
	b := load(t, "./issue_1828")

	if assert.Contains(t, b.Config.Variables, "map") {
		assert.Equal(t, map[string]any{
			"foo": "bar",
		}, b.Config.Variables["map"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "sequence") {
		assert.Equal(t, []any{
			"foo",
			"bar",
		}, b.Config.Variables["sequence"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "string") {
		assert.Equal(t, "foo", b.Config.Variables["string"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "bool") {
		assert.Equal(t, true, b.Config.Variables["bool"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "int") {
		assert.Equal(t, 42, b.Config.Variables["int"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "float") {
		assert.Equal(t, 3.14, b.Config.Variables["float"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "time") {
		assert.Equal(t, "2021-01-01", b.Config.Variables["time"].Default)
	}

	if assert.Contains(t, b.Config.Variables, "nil") {
		assert.Equal(t, nil, b.Config.Variables["nil"].Default)
	}
}
@@ -78,7 +78,7 @@ func newDeployCommand() *cobra.Command {
		}

		renderOpts := render.RenderOptions{RenderSummaryTable: false}
-		err := render.RenderTextOutput(cmd.OutOrStdout(), b, diags, renderOpts)
+		err := render.RenderDiagnostics(cmd.OutOrStdout(), b, diags, renderOpts)
		if err != nil {
			return fmt.Errorf("failed to render output: %w", err)
		}
@@ -8,8 +8,10 @@ import (
	"path/filepath"

	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/deploy/terraform"
	"github.com/databricks/cli/bundle/phases"
+	"github.com/databricks/cli/bundle/render"
	"github.com/databricks/cli/cmd/bundle/utils"
	"github.com/databricks/cli/cmd/root"
	"github.com/databricks/cli/libs/flags"

@@ -19,11 +21,8 @@ import (
func newSummaryCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use: "summary",
-		Short: "Describe the bundle resources and their deployment states",
+		Short: "Summarize resources deployed by this bundle",
		Args: root.NoArgs,
-
-		// This command is currently intended for the Databricks VSCode extension only
-		Hidden: true,
	}

	var forcePull bool

@@ -60,14 +59,15 @@ func newSummaryCommand() *cobra.Command {
			}
		}

-		diags = bundle.Apply(ctx, b, terraform.Load())
+		diags = bundle.Apply(ctx, b,
+			bundle.Seq(terraform.Load(), mutator.InitializeURLs()))
		if err := diags.Error(); err != nil {
			return err
		}

		switch root.OutputType(cmd) {
		case flags.OutputText:
-			return fmt.Errorf("%w, only json output is supported", errors.ErrUnsupported)
+			return render.RenderSummary(ctx, cmd.OutOrStdout(), b)
		case flags.OutputJSON:
			buf, err := json.MarshalIndent(b.Config, "", "  ")
			if err != nil {
@@ -54,7 +54,7 @@ func newValidateCommand() *cobra.Command {
		switch root.OutputType(cmd) {
		case flags.OutputText:
			renderOpts := render.RenderOptions{RenderSummaryTable: true}
-			err := render.RenderTextOutput(cmd.OutOrStdout(), b, diags, renderOpts)
+			err := render.RenderDiagnostics(cmd.OutOrStdout(), b, diags, renderOpts)
			if err != nil {
				return fmt.Errorf("failed to render output: %w", err)
			}
@@ -112,8 +112,8 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
		// assert mkdir fails
		_, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello"))

-		// Different cloud providers return different errors.
-		regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`)
+		// Different cloud providers or cloud configurations return different errors.
+		regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`)
		assert.Regexp(t, regex, err.Error())
	})
@@ -20,6 +20,7 @@ import (
	"time"

	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/cli/internal/acc"
	"github.com/databricks/cli/libs/flags"

	"github.com/databricks/cli/cmd"

@@ -591,13 +592,10 @@ func setupWsfsExtensionsFiler(t *testing.T) (filer.Filer, string) {
}

func setupDbfsFiler(t *testing.T) (filer.Filer, string) {
-	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))
+	_, wt := acc.WorkspaceTest(t)

-	w, err := databricks.NewWorkspaceClient()
-	require.NoError(t, err)
-
-	tmpDir := TemporaryDbfsDir(t, w)
-	f, err := filer.NewDbfsClient(w, tmpDir)
+	tmpDir := TemporaryDbfsDir(t, wt.W)
+	f, err := filer.NewDbfsClient(wt.W, tmpDir)
	require.NoError(t, err)

	return f, path.Join("dbfs:/", tmpDir)
@@ -398,6 +398,34 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d
	return dyn.NewValue(out, src.Locations()), diags
}

-func (n normalizeOptions) normalizeInterface(typ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) {
+func (n normalizeOptions) normalizeInterface(_ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) {
+	// Deal with every [dyn.Kind] here to ensure completeness.
+	switch src.Kind() {
+	case dyn.KindMap:
+		// Fall through
+	case dyn.KindSequence:
+		// Fall through
+	case dyn.KindString:
+		// Fall through
+	case dyn.KindBool:
+		// Fall through
+	case dyn.KindInt:
+		// Fall through
+	case dyn.KindFloat:
+		// Fall through
+	case dyn.KindTime:
+		// Conversion of a time value to an interface{}.
+		// The [dyn.Value.AsAny] equivalent for this kind is the [time.Time] struct.
+		// If we convert to a typed representation and back again, we cannot distinguish
+		// a [time.Time] struct from any other struct.
+		//
+		// Therefore, we normalize the time value to a string.
+		return dyn.NewValue(src.MustTime().String(), src.Locations()), nil
+	case dyn.KindNil:
+		// Fall through
+	default:
+		return dyn.InvalidValue, diag.Errorf("unsupported kind: %s", src.Kind())
+	}
+
	return src, nil
}
@@ -858,23 +858,7 @@ func TestNormalizeAnchors(t *testing.T) {
	}, vout.AsAny())
}

-func TestNormalizeBoolToAny(t *testing.T) {
-	var typ any
-	vin := dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}})
-	vout, err := Normalize(&typ, vin)
-	assert.Len(t, err, 0)
-	assert.Equal(t, dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
-}
-
-func TestNormalizeIntToAny(t *testing.T) {
-	var typ any
-	vin := dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}})
-	vout, err := Normalize(&typ, vin)
-	assert.Len(t, err, 0)
-	assert.Equal(t, dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
-}
-
-func TestNormalizeSliceToAny(t *testing.T) {
+func TestNormalizeAnyFromSlice(t *testing.T) {
	var typ any
	v1 := dyn.NewValue(1, []dyn.Location{{File: "file", Line: 1, Column: 1}})
	v2 := dyn.NewValue(2, []dyn.Location{{File: "file", Line: 1, Column: 1}})

@@ -883,3 +867,35 @@ func TestNormalizeSliceToAny(t *testing.T) {
	assert.Len(t, err, 0)
	assert.Equal(t, dyn.NewValue([]dyn.Value{v1, v2}, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
}
+
+func TestNormalizeAnyFromString(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue("string", []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue("string", []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromBool(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromInt(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromTime(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(dyn.MustTime("2024-08-29"), []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, dyn.NewValue("2024-08-29", vin.Locations()), vout)
+}
@@ -0,0 +1,60 @@
package dynassert

import (
	"fmt"
	"strings"

	"github.com/databricks/cli/libs/dyn"
)

// Dump returns the Go code to recreate the given value.
func Dump(v dyn.Value) string {
	var sb strings.Builder
	dump(v, &sb)
	return sb.String()
}

func dump(v dyn.Value, sb *strings.Builder) {
	sb.WriteString("dyn.NewValue(\n")

	switch v.Kind() {
	case dyn.KindMap:
		sb.WriteString("map[string]dyn.Value{")
		m := v.MustMap()
		for _, p := range m.Pairs() {
			sb.WriteString(fmt.Sprintf("\n%q: ", p.Key.MustString()))
			dump(p.Value, sb)
			sb.WriteByte(',')
		}
		sb.WriteString("\n},\n")
	case dyn.KindSequence:
		sb.WriteString("[]dyn.Value{\n")
		for _, e := range v.MustSequence() {
			dump(e, sb)
			sb.WriteByte(',')
		}
		sb.WriteString("},\n")
	case dyn.KindString:
		sb.WriteString(fmt.Sprintf("%q,\n", v.MustString()))
	case dyn.KindBool:
		sb.WriteString(fmt.Sprintf("%t,\n", v.MustBool()))
	case dyn.KindInt:
		sb.WriteString(fmt.Sprintf("%d,\n", v.MustInt()))
	case dyn.KindFloat:
		sb.WriteString(fmt.Sprintf("%f,\n", v.MustFloat()))
	case dyn.KindTime:
		sb.WriteString(fmt.Sprintf("dyn.NewTime(%q),\n", v.MustTime().String()))
	case dyn.KindNil:
		sb.WriteString("nil,\n")
	default:
		panic(fmt.Sprintf("unhandled kind: %v", v.Kind()))
	}

	// Add location
	sb.WriteString("[]dyn.Location{")
	for _, l := range v.Locations() {
		sb.WriteString(fmt.Sprintf("{File: %q, Line: %d, Column: %d},", l.File, l.Line, l.Column))
	}
	sb.WriteString("},\n")
	sb.WriteString(")")
}
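A hypothetical usage sketch for the Dump helper above. The dyn.NewValue / dyn.Location construction mirrors the tests elsewhere in this commit; the standalone main wrapper and the importability of the dynassert package from application code are assumptions for illustration.

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynassert"
)

func main() {
	v := dyn.NewValue(
		map[string]dyn.Value{
			"hr": dyn.NewValue(65, []dyn.Location{{File: "example.yml", Line: 3, Column: 6}}),
		},
		[]dyn.Location{{File: "example.yml", Line: 3, Column: 1}},
	)

	// Prints Go source that reconstructs v, handy when updating test expectations.
	fmt.Println(dynassert.Dump(v))
}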
@@ -105,6 +105,9 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
 		switch st {
 		case "!!str":
 			// OK
+		case "!!null":
+			// A literal unquoted "null" is treated as a null value by the YAML parser.
+			// However, when used as a key, it is treated as the string "null".
 		case "!!merge":
 			if merge != nil {
 				panic("merge node already set")
@@ -115,10 +118,11 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
 			return dyn.InvalidValue, errorf(loc, "invalid key tag: %v", st)
 		}
 
-		k, err := d.load(key)
-		if err != nil {
-			return dyn.InvalidValue, err
-		}
+		k := dyn.NewValue(key.Value, []dyn.Location{{
+			File:   d.path,
+			Line:   key.Line,
+			Column: key.Column,
+		}})
 
 		v, err := d.load(val)
 		if err != nil {
@@ -173,6 +177,14 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
 	return dyn.NewValue(out, []dyn.Location{loc}), nil
 }
 
+func newIntValue(i64 int64, loc dyn.Location) dyn.Value {
+	// Use regular int type instead of int64 if possible.
+	if i64 >= math.MinInt32 && i64 <= math.MaxInt32 {
+		return dyn.NewValue(int(i64), []dyn.Location{loc})
+	}
+	return dyn.NewValue(i64, []dyn.Location{loc})
+}
+
 func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
 	st := node.ShortTag()
 	switch st {
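A small illustration of what newIntValue above buys: integers that fit in 32 bits come back as plain int, larger ones stay int64. The helper below reimplements the same check for demonstration only; it is not part of the commit.

package main

import (
	"fmt"
	"math"
)

// normalizeInt mirrors the behavior of newIntValue, minus the dyn.Value wrapping.
func normalizeInt(i64 int64) any {
	if i64 >= math.MinInt32 && i64 <= math.MaxInt32 {
		return int(i64)
	}
	return i64
}

func main() {
	fmt.Printf("%T\n", normalizeInt(147))   // int
	fmt.Printf("%T\n", normalizeInt(1<<40)) // int64
}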
@@ -188,18 +200,44 @@ func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
 			return dyn.InvalidValue, errorf(loc, "invalid bool value: %v", node.Value)
 		}
 	case "!!int":
-		i64, err := strconv.ParseInt(node.Value, 10, 64)
-		if err != nil {
-			return dyn.InvalidValue, errorf(loc, "invalid int value: %v", node.Value)
+		// Try to parse the integer value in base 10.
+		// We trim leading zeros to avoid octal parsing of the "0" prefix.
+		// See "testdata/spec_example_2.19.yml" for background.
+		i64, err := strconv.ParseInt(strings.TrimLeft(node.Value, "0"), 10, 64)
+		if err == nil {
+			return newIntValue(i64, loc), nil
 		}
-		// Use regular int type instead of int64 if possible.
-		if i64 >= math.MinInt32 && i64 <= math.MaxInt32 {
-			return dyn.NewValue(int(i64), []dyn.Location{loc}), nil
+		// Let the [ParseInt] function figure out the base.
+		i64, err = strconv.ParseInt(node.Value, 0, 64)
+		if err == nil {
+			return newIntValue(i64, loc), nil
 		}
-		return dyn.NewValue(i64, []dyn.Location{loc}), nil
+		return dyn.InvalidValue, errorf(loc, "invalid int value: %v", node.Value)
 	case "!!float":
 		f64, err := strconv.ParseFloat(node.Value, 64)
 		if err != nil {
+			// Deal with infinity prefixes.
+			v := strings.ToLower(node.Value)
+			switch {
+			case strings.HasPrefix(v, "+"):
+				v = strings.TrimPrefix(v, "+")
+				f64 = math.Inf(1)
+			case strings.HasPrefix(v, "-"):
+				v = strings.TrimPrefix(v, "-")
+				f64 = math.Inf(-1)
+			default:
+				// No prefix.
+				f64 = math.Inf(1)
+			}
+
+			// Deal with infinity and NaN values.
+			switch v {
+			case ".inf":
+				return dyn.NewValue(f64, []dyn.Location{loc}), nil
+			case ".nan":
+				return dyn.NewValue(math.NaN(), []dyn.Location{loc}), nil
+			}
+
 			return dyn.InvalidValue, errorf(loc, "invalid float value: %v", node.Value)
 		}
 		return dyn.NewValue(f64, []dyn.Location{loc}), nil
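The two-step parse above leans entirely on strconv: base 10 first, with leading zeros trimmed so YAML 1.1-style "0"-prefixed numbers stay decimal, then base 0 so ParseInt recognizes the 0o/0x prefixes itself. A standalone sketch of that fallback order (the sample inputs are assumptions, modeled on spec_example_2.19.yml):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	for _, s := range []string{"12345", "+12345", "012345", "0o14", "0xC"} {
		// First attempt: plain decimal, with leading zeros stripped.
		if i, err := strconv.ParseInt(strings.TrimLeft(s, "0"), 10, 64); err == nil {
			fmt.Println(s, "=>", i) // "012345" stays decimal 12345
			continue
		}
		// Fallback: let ParseInt pick the base from the prefix (0o -> octal, 0x -> hex).
		if i, err := strconv.ParseInt(s, 0, 64); err == nil {
			fmt.Println(s, "=>", i) // "0o14" => 12, "0xC" => 12
			continue
		}
		fmt.Println(s, "=> invalid")
	}
}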
@ -0,0 +1,5 @@
|
||||||
|
# Example 2.1 Sequence of Scalars (ball players)
|
||||||
|
|
||||||
|
- Mark McGwire
|
||||||
|
- Sammy Sosa
|
||||||
|
- Ken Griffey
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.10 Node for “Sammy Sosa” appears twice in this document
|
||||||
|
|
||||||
|
---
|
||||||
|
hr:
|
||||||
|
- Mark McGwire
|
||||||
|
# Following node labeled SS
|
||||||
|
- &SS Sammy Sosa
|
||||||
|
rbi:
|
||||||
|
- *SS # Subsequent occurrence
|
||||||
|
- Ken Griffey
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.11 Mapping between Sequences
|
||||||
|
|
||||||
|
? - Detroit Tigers
|
||||||
|
- Chicago cubs
|
||||||
|
: - 2001-07-23
|
||||||
|
|
||||||
|
? [ New York Yankees,
|
||||||
|
Atlanta Braves ]
|
||||||
|
: [ 2001-07-02, 2001-08-12,
|
||||||
|
2001-08-14 ]
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.12 Compact Nested Mapping
|
||||||
|
|
||||||
|
---
|
||||||
|
# Products purchased
|
||||||
|
- item : Super Hoop
|
||||||
|
quantity: 1
|
||||||
|
- item : Basketball
|
||||||
|
quantity: 4
|
||||||
|
- item : Big Shoes
|
||||||
|
quantity: 1
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Example 2.13 In literals, newlines are preserved
|
||||||
|
|
||||||
|
# ASCII Art
|
||||||
|
--- |
|
||||||
|
\//||\/||
|
||||||
|
// || ||__
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Example 2.14 In the folded scalars, newlines become spaces
|
||||||
|
|
||||||
|
--- >
|
||||||
|
Mark McGwire's
|
||||||
|
year was crippled
|
||||||
|
by a knee injury.
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.15 Folded newlines are preserved for “more indented” and blank lines
|
||||||
|
|
||||||
|
--- >
|
||||||
|
Sammy Sosa completed another
|
||||||
|
fine season with great stats.
|
||||||
|
|
||||||
|
63 Home Runs
|
||||||
|
0.288 Batting Average
|
||||||
|
|
||||||
|
What a year!
|
|
@ -0,0 +1,9 @@
|
||||||
|
# Example 2.16 Indentation determines scope
|
||||||
|
|
||||||
|
name: Mark McGwire
|
||||||
|
accomplishment: >
|
||||||
|
Mark set a major league
|
||||||
|
home run record in 1998.
|
||||||
|
stats: |
|
||||||
|
65 Home Runs
|
||||||
|
0.278 Batting Average
|
|
@ -0,0 +1,9 @@
|
||||||
|
# Example 2.17 Quoted Scalars
|
||||||
|
|
||||||
|
unicode: "Sosa did fine.\u263A"
|
||||||
|
control: "\b1998\t1999\t2000\n"
|
||||||
|
hex esc: "\x0d\x0a is \r\n"
|
||||||
|
|
||||||
|
single: '"Howdy!" he cried.'
|
||||||
|
quoted: ' # Not a ''comment''.'
|
||||||
|
tie-fighter: '|\-*-/|'
|
|
@ -0,0 +1,8 @@
|
||||||
|
# Example 2.18 Multi-line Flow Scalars
|
||||||
|
|
||||||
|
plain:
|
||||||
|
This unquoted scalar
|
||||||
|
spans many lines.
|
||||||
|
|
||||||
|
quoted: "So does this
|
||||||
|
quoted scalar.\n"
|
|
@ -0,0 +1,15 @@
|
||||||
|
# Example 2.19 Integers
|
||||||
|
|
||||||
|
canonical: 12345
|
||||||
|
decimal: +12345
|
||||||
|
octal: 0o14
|
||||||
|
hexadecimal: 0xC
|
||||||
|
|
||||||
|
# Note: this example is not part of the spec but added for completeness.
|
||||||
|
#
|
||||||
|
# Octal numbers:
|
||||||
|
# - YAML 1.1: prefix is "0"
|
||||||
|
# - YAML 1.2: prefix is "0o"
|
||||||
|
# The "gopkg.in/yaml.v3" package accepts both for backwards compat.
|
||||||
|
# We accept only the YAML 1.2 prefix "0o".
|
||||||
|
octal11: 012345
|
|
@ -0,0 +1,5 @@
|
||||||
|
# Example 2.2 Mapping Scalars to Scalars (player statistics)
|
||||||
|
|
||||||
|
hr: 65 # Home runs
|
||||||
|
avg: 0.278 # Batting average
|
||||||
|
rbi: 147 # Runs Batted In
|
|
@ -0,0 +1,7 @@
|
||||||
|
# Example 2.20 Floating Point
|
||||||
|
|
||||||
|
canonical: 1.23015e+3
|
||||||
|
exponential: 12.3015e+02
|
||||||
|
fixed: 1230.15
|
||||||
|
negative infinity: -.inf
|
||||||
|
not a number: .nan
|
|
@ -0,0 +1,5 @@
|
||||||
|
# Example 2.21 Miscellaneous
|
||||||
|
|
||||||
|
null:
|
||||||
|
booleans: [ true, false ]
|
||||||
|
string: '012345'
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Example 2.22 Timestamps
|
||||||
|
|
||||||
|
canonical: 2001-12-15T02:59:43.1Z
|
||||||
|
iso8601: 2001-12-14t21:59:43.10-05:00
|
||||||
|
spaced: 2001-12-14 21:59:43.10 -5
|
||||||
|
date: 2002-12-14
|
|
@ -0,0 +1,15 @@
|
||||||
|
# Example 2.23 Various Explicit Tags
|
||||||
|
|
||||||
|
---
|
||||||
|
not-date: !!str 2002-04-28
|
||||||
|
|
||||||
|
picture: !!binary |
|
||||||
|
R0lGODlhDAAMAIQAAP//9/X
|
||||||
|
17unp5WZmZgAAAOfn515eXv
|
||||||
|
Pz7Y6OjuDg4J+fn5OTk6enp
|
||||||
|
56enmleECcgggoBADs=
|
||||||
|
|
||||||
|
application specific tag: !something |
|
||||||
|
The semantics of the tag
|
||||||
|
above may be different for
|
||||||
|
different documents.
|
|
@ -0,0 +1,16 @@
|
||||||
|
# Example 2.24 Global Tags
|
||||||
|
|
||||||
|
%TAG ! tag:clarkevans.com,2002:
|
||||||
|
--- !shape
|
||||||
|
# Use the ! handle for presenting
|
||||||
|
# tag:clarkevans.com,2002:circle
|
||||||
|
- !circle
|
||||||
|
center: &ORIGIN {x: 73, y: 129}
|
||||||
|
radius: 7
|
||||||
|
- !line
|
||||||
|
start: *ORIGIN
|
||||||
|
finish: { x: 89, y: 102 }
|
||||||
|
- !label
|
||||||
|
start: *ORIGIN
|
||||||
|
color: 0xFFEEBB
|
||||||
|
text: Pretty vector drawing.
|
|
@ -0,0 +1,9 @@
|
||||||
|
# Example 2.25 Unordered Sets
|
||||||
|
|
||||||
|
# Sets are represented as a
|
||||||
|
# Mapping where each key is
|
||||||
|
# associated with a null value
|
||||||
|
--- !!set
|
||||||
|
? Mark McGwire
|
||||||
|
? Sammy Sosa
|
||||||
|
? Ken Griffey
|
|
@ -0,0 +1,9 @@
|
||||||
|
# Example 2.26 Ordered Mappings
|
||||||
|
|
||||||
|
# Ordered maps are represented as
|
||||||
|
# A sequence of mappings, with
|
||||||
|
# each mapping having one key
|
||||||
|
--- !!omap
|
||||||
|
- Mark McGwire: 65
|
||||||
|
- Sammy Sosa: 63
|
||||||
|
- Ken Griffey: 58
|
|
@ -0,0 +1,31 @@
|
||||||
|
# Example 2.27 Invoice
|
||||||
|
|
||||||
|
--- !<tag:clarkevans.com,2002:invoice>
|
||||||
|
invoice: 34843
|
||||||
|
date : 2001-01-23
|
||||||
|
bill-to: &id001
|
||||||
|
given : Chris
|
||||||
|
family : Dumars
|
||||||
|
address:
|
||||||
|
lines: |
|
||||||
|
458 Walkman Dr.
|
||||||
|
Suite #292
|
||||||
|
city : Royal Oak
|
||||||
|
state : MI
|
||||||
|
postal : 48046
|
||||||
|
ship-to: *id001
|
||||||
|
product:
|
||||||
|
- sku : BL394D
|
||||||
|
quantity : 4
|
||||||
|
description : Basketball
|
||||||
|
price : 450.00
|
||||||
|
- sku : BL4438H
|
||||||
|
quantity : 1
|
||||||
|
description : Super Hoop
|
||||||
|
price : 2392.00
|
||||||
|
tax : 251.42
|
||||||
|
total: 4443.52
|
||||||
|
comments:
|
||||||
|
Late afternoon is best.
|
||||||
|
Backup contact is Nancy
|
||||||
|
Billsmer @ 338-4338.
|
|
@ -0,0 +1,28 @@
|
||||||
|
# Example 2.28 Log File
|
||||||
|
|
||||||
|
---
|
||||||
|
Time: 2001-11-23 15:01:42 -5
|
||||||
|
User: ed
|
||||||
|
Warning:
|
||||||
|
This is an error message
|
||||||
|
for the log file
|
||||||
|
---
|
||||||
|
Time: 2001-11-23 15:02:31 -5
|
||||||
|
User: ed
|
||||||
|
Warning:
|
||||||
|
A slightly different error
|
||||||
|
message.
|
||||||
|
---
|
||||||
|
Date: 2001-11-23 15:03:17 -5
|
||||||
|
User: ed
|
||||||
|
Fatal:
|
||||||
|
Unknown variable "bar"
|
||||||
|
Stack:
|
||||||
|
- file: TopClass.py
|
||||||
|
line: 23
|
||||||
|
code: |
|
||||||
|
x = MoreObject("345\n")
|
||||||
|
- file: MoreClass.py
|
||||||
|
line: 58
|
||||||
|
code: |-
|
||||||
|
foo = bar
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.3 Mapping Scalars to Sequences (ball clubs in each league)
|
||||||
|
|
||||||
|
american:
|
||||||
|
- Boston Red Sox
|
||||||
|
- Detroit Tigers
|
||||||
|
- New York Yankees
|
||||||
|
national:
|
||||||
|
- New York Mets
|
||||||
|
- Chicago Cubs
|
||||||
|
- Atlanta Braves
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.4 Sequence of Mappings (players’ statistics)
|
||||||
|
|
||||||
|
-
|
||||||
|
name: Mark McGwire
|
||||||
|
hr: 65
|
||||||
|
avg: 0.278
|
||||||
|
-
|
||||||
|
name: Sammy Sosa
|
||||||
|
hr: 63
|
||||||
|
avg: 0.288
|
|
@ -0,0 +1,5 @@
|
||||||
|
# Example 2.5 Sequence of Sequences
|
||||||
|
|
||||||
|
- [name , hr, avg ]
|
||||||
|
- [Mark McGwire, 65, 0.278]
|
||||||
|
- [Sammy Sosa , 63, 0.288]
|
|
@ -0,0 +1,7 @@
|
||||||
|
# Example 2.6 Mapping of Mappings
|
||||||
|
|
||||||
|
Mark McGwire: {hr: 65, avg: 0.278}
|
||||||
|
Sammy Sosa: {
|
||||||
|
hr: 63,
|
||||||
|
avg: 0.288,
|
||||||
|
}
|
|
@ -0,0 +1,12 @@
|
||||||
|
# Example 2.7 Two Documents in a Stream (each with a leading comment)
|
||||||
|
|
||||||
|
# Ranking of 1998 home runs
|
||||||
|
---
|
||||||
|
- Mark McGwire
|
||||||
|
- Sammy Sosa
|
||||||
|
- Ken Griffey
|
||||||
|
|
||||||
|
# Team ranking
|
||||||
|
---
|
||||||
|
- Chicago Cubs
|
||||||
|
- St Louis Cardinals
|
|
@ -0,0 +1,12 @@
|
||||||
|
# Example 2.8 Play by Play Feed from a Game
|
||||||
|
|
||||||
|
---
|
||||||
|
time: 20:03:20
|
||||||
|
player: Sammy Sosa
|
||||||
|
action: strike (miss)
|
||||||
|
...
|
||||||
|
---
|
||||||
|
time: 20:03:47
|
||||||
|
player: Sammy Sosa
|
||||||
|
action: grand slam
|
||||||
|
...
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Example 2.9 Single Document with Two Comments
|
||||||
|
|
||||||
|
---
|
||||||
|
hr: # 1998 hr ranking
|
||||||
|
- Mark McGwire
|
||||||
|
- Sammy Sosa
|
||||||
|
# 1998 rbi ranking
|
||||||
|
rbi:
|
||||||
|
- Sammy Sosa
|
||||||
|
- Ken Griffey
|
|
@ -0,0 +1,821 @@
|
||||||
|
package yamlloader_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"math"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/databricks/cli/libs/dyn"
|
||||||
|
assert "github.com/databricks/cli/libs/dyn/dynassert"
|
||||||
|
"github.com/databricks/cli/libs/dyn/yamlloader"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
const NL = "\n"
|
||||||
|
|
||||||
|
func loadExample(t *testing.T, file string) dyn.Value {
|
||||||
|
input, err := os.ReadFile(file)
|
||||||
|
require.NoError(t, err)
|
||||||
|
self, err := yamlloader.LoadYAML(file, bytes.NewBuffer(input))
|
||||||
|
require.NoError(t, err)
|
||||||
|
return self
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_1(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.1.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 3, Column: 3}}),
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 4, Column: 3}}),
|
||||||
|
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 5, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_2(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.2.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 3, Column: 6}}),
|
||||||
|
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 4, Column: 6}}),
|
||||||
|
"rbi": dyn.NewValue(147, []dyn.Location{{File: file, Line: 5, Column: 6}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_3(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.3.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"american": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Boston Red Sox", []dyn.Location{{File: file, Line: 4, Column: 3}}),
|
||||||
|
dyn.NewValue("Detroit Tigers", []dyn.Location{{File: file, Line: 5, Column: 3}}),
|
||||||
|
dyn.NewValue("New York Yankees", []dyn.Location{{File: file, Line: 6, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 1}},
|
||||||
|
),
|
||||||
|
"national": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("New York Mets", []dyn.Location{{File: file, Line: 8, Column: 3}}),
|
||||||
|
dyn.NewValue("Chicago Cubs", []dyn.Location{{File: file, Line: 9, Column: 3}}),
|
||||||
|
dyn.NewValue("Atlanta Braves", []dyn.Location{{File: file, Line: 10, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 1}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_4(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.4.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"name": dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 4, Column: 9}}),
|
||||||
|
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 5, Column: 9}}),
|
||||||
|
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 6, Column: 9}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"name": dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 8, Column: 9}}),
|
||||||
|
"hr": dyn.NewValue(63, []dyn.Location{{File: file, Line: 9, Column: 9}}),
|
||||||
|
"avg": dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 10, Column: 9}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_5(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.5.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("name", []dyn.Location{{File: file, Line: 3, Column: 4}}),
|
||||||
|
dyn.NewValue("hr", []dyn.Location{{File: file, Line: 3, Column: 18}}),
|
||||||
|
dyn.NewValue("avg", []dyn.Location{{File: file, Line: 3, Column: 22}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 4, Column: 4}}),
|
||||||
|
dyn.NewValue(65, []dyn.Location{{File: file, Line: 4, Column: 18}}),
|
||||||
|
dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 4, Column: 22}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 5, Column: 4}}),
|
||||||
|
dyn.NewValue(63, []dyn.Location{{File: file, Line: 5, Column: 18}}),
|
||||||
|
dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 5, Column: 22}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_6(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.6.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Mark McGwire": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 3, Column: 20}}),
|
||||||
|
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 3, Column: 29}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 15}},
|
||||||
|
),
|
||||||
|
"Sammy Sosa": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"hr": dyn.NewValue(63, []dyn.Location{{File: file, Line: 5, Column: 9}}),
|
||||||
|
"avg": dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 6, Column: 10}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 13}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_7(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.7.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
// Note: we do not support multiple documents in a single YAML file.
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
"Mark McGwire",
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
"Sammy Sosa",
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
"Ken Griffey",
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_8(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.8.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
// Note: we do not support multiple documents in a single YAML file.
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"time": dyn.NewValue("20:03:20", []dyn.Location{{File: file, Line: 4, Column: 7}}),
|
||||||
|
"player": dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 5, Column: 9}}),
|
||||||
|
"action": dyn.NewValue("strike (miss)", []dyn.Location{{File: file, Line: 6, Column: 9}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_9(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.9.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
// Note: we do not support multiple documents in a single YAML file.
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"hr": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 5, Column: 3}}),
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 6, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 1}},
|
||||||
|
),
|
||||||
|
"rbi": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 9, Column: 3}}),
|
||||||
|
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 10, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 9, Column: 1}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_10(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.10.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"hr": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 5, Column: 3}}),
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 7, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 1}},
|
||||||
|
),
|
||||||
|
"rbi": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
// The location for an anchored value refers to the anchor, not the reference.
|
||||||
|
// This is the same location as the anchor that appears in the "hr" mapping.
|
||||||
|
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 7, Column: 3}}),
|
||||||
|
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 10, Column: 3}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 9, Column: 1}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_11(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.11.yml"
|
||||||
|
input, err := os.ReadFile(file)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Note: non-string mapping keys are not supported by "gopkg.in/yaml.v3".
|
||||||
|
_, err = yamlloader.LoadYAML(file, bytes.NewBuffer(input))
|
||||||
|
assert.ErrorContains(t, err, `: key is not a scalar`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_12(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.12.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"item": dyn.NewValue("Super Hoop", []dyn.Location{{File: file, Line: 5, Column: 13}}),
|
||||||
|
"quantity": dyn.NewValue(1, []dyn.Location{{File: file, Line: 6, Column: 13}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"item": dyn.NewValue("Basketball", []dyn.Location{{File: file, Line: 7, Column: 13}}),
|
||||||
|
"quantity": dyn.NewValue(4, []dyn.Location{{File: file, Line: 8, Column: 13}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"item": dyn.NewValue("Big Shoes", []dyn.Location{{File: file, Line: 9, Column: 13}}),
|
||||||
|
"quantity": dyn.NewValue(1, []dyn.Location{{File: file, Line: 10, Column: 13}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 9, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_13(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.13.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
``+
|
||||||
|
`\//||\/||`+NL+
|
||||||
|
"// || ||__"+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_14(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.14.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
`Mark McGwire's year was crippled by a knee injury.`+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_15(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.15.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
``+
|
||||||
|
`Sammy Sosa completed another fine season with great stats.`+NL+
|
||||||
|
NL+
|
||||||
|
` 63 Home Runs`+NL+
|
||||||
|
` 0.288 Batting Average`+NL+
|
||||||
|
NL+
|
||||||
|
`What a year!`+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_16(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.16.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"name": dyn.NewValue(
|
||||||
|
"Mark McGwire",
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 7}},
|
||||||
|
),
|
||||||
|
"accomplishment": dyn.NewValue(
|
||||||
|
`Mark set a major league home run record in 1998.`+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 17}},
|
||||||
|
),
|
||||||
|
"stats": dyn.NewValue(
|
||||||
|
``+
|
||||||
|
`65 Home Runs`+NL+
|
||||||
|
`0.278 Batting Average`+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 8}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_17(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.17.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"unicode": dyn.NewValue(
|
||||||
|
`Sosa did fine.`+"\u263A",
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 10}},
|
||||||
|
),
|
||||||
|
"control": dyn.NewValue(
|
||||||
|
"\b1998\t1999\t2000\n",
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 10}},
|
||||||
|
),
|
||||||
|
"hex esc": dyn.NewValue(
|
||||||
|
"\x0d\x0a is \r\n",
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 10}},
|
||||||
|
),
|
||||||
|
"single": dyn.NewValue(
|
||||||
|
`"Howdy!" he cried.`,
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 9}},
|
||||||
|
),
|
||||||
|
"quoted": dyn.NewValue(
|
||||||
|
` # Not a 'comment'.`,
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 9}},
|
||||||
|
),
|
||||||
|
"tie-fighter": dyn.NewValue(
|
||||||
|
`|\-*-/|`,
|
||||||
|
[]dyn.Location{{File: file, Line: 9, Column: 14}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_18(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.18.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"plain": dyn.NewValue(
|
||||||
|
`This unquoted scalar spans many lines.`,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 3}},
|
||||||
|
),
|
||||||
|
"quoted": dyn.NewValue(
|
||||||
|
`So does this quoted scalar.`+NL,
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 9}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_19(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.19.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"canonical": dyn.NewValue(
|
||||||
|
12345,
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 12}},
|
||||||
|
),
|
||||||
|
"decimal": dyn.NewValue(
|
||||||
|
12345,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 10}},
|
||||||
|
),
|
||||||
|
"octal": dyn.NewValue(
|
||||||
|
12,
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 8}},
|
||||||
|
),
|
||||||
|
"hexadecimal": dyn.NewValue(
|
||||||
|
12,
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 14}},
|
||||||
|
),
|
||||||
|
"octal11": dyn.NewValue(
|
||||||
|
12345,
|
||||||
|
[]dyn.Location{{File: file, Line: 15, Column: 10}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_20(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.20.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
// Equality assertion doesn't work with NaNs.
|
||||||
|
// See https://github.com/stretchr/testify/issues/624.
|
||||||
|
//
|
||||||
|
// Remove the NaN entry.
|
||||||
|
self, _ = dyn.Walk(self, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
|
||||||
|
if f, ok := v.AsFloat(); ok && math.IsNaN(f) {
|
||||||
|
return dyn.InvalidValue, dyn.ErrDrop
|
||||||
|
}
|
||||||
|
return v, nil
|
||||||
|
})
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"canonical": dyn.NewValue(
|
||||||
|
1230.15,
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 12}},
|
||||||
|
),
|
||||||
|
"exponential": dyn.NewValue(
|
||||||
|
1230.15,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 14}},
|
||||||
|
),
|
||||||
|
"fixed": dyn.NewValue(
|
||||||
|
1230.15,
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 8}},
|
||||||
|
),
|
||||||
|
"negative infinity": dyn.NewValue(
|
||||||
|
math.Inf(-1),
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 20}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_21(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.21.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"null": dyn.NewValue(
|
||||||
|
nil,
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 6}},
|
||||||
|
),
|
||||||
|
"booleans": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(true, []dyn.Location{{File: file, Line: 4, Column: 13}}),
|
||||||
|
dyn.NewValue(false, []dyn.Location{{File: file, Line: 4, Column: 19}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 11}},
|
||||||
|
),
|
||||||
|
"string": dyn.NewValue(
|
||||||
|
"012345",
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 9}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_22(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.22.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"canonical": dyn.NewValue(
|
||||||
|
dyn.MustTime("2001-12-15T02:59:43.1Z"),
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 12}},
|
||||||
|
),
|
||||||
|
"iso8601": dyn.NewValue(
|
||||||
|
dyn.MustTime("2001-12-14t21:59:43.10-05:00"),
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 10}},
|
||||||
|
),
|
||||||
|
"spaced": dyn.NewValue(
|
||||||
|
// This is parsed as a string, not a timestamp,
|
||||||
|
// both by "gopkg.in/yaml.v3" and by our implementation.
|
||||||
|
"2001-12-14 21:59:43.10 -5",
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 9}},
|
||||||
|
),
|
||||||
|
"date": dyn.NewValue(
|
||||||
|
dyn.MustTime("2002-12-14"),
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 7}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_23(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.23.yml"
|
||||||
|
input, err := os.ReadFile(file)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// Note: the !!binary tag is not supported by us.
|
||||||
|
|
||||||
|
_, err = yamlloader.LoadYAML(file, bytes.NewBuffer(input))
|
||||||
|
assert.ErrorContains(t, err, `: unknown tag: !!binary`)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_24(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.24.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"center": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
|
||||||
|
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 11}},
|
||||||
|
),
|
||||||
|
"radius": dyn.NewValue(7, []dyn.Location{{File: file, Line: 9, Column: 11}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"start": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
|
||||||
|
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 11}},
|
||||||
|
),
|
||||||
|
"finish": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"x": dyn.NewValue(89, []dyn.Location{{File: file, Line: 12, Column: 16}}),
|
||||||
|
"y": dyn.NewValue(102, []dyn.Location{{File: file, Line: 12, Column: 23}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 12, Column: 11}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 10, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"start": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
|
||||||
|
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 11}},
|
||||||
|
),
|
||||||
|
"color": dyn.NewValue(16772795, []dyn.Location{{File: file, Line: 15, Column: 10}}),
|
||||||
|
"text": dyn.NewValue("Pretty vector drawing.", []dyn.Location{{File: file, Line: 16, Column: 9}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 13, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_25(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.25.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Mark McGwire": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 8, Column: 1}}),
|
||||||
|
"Sammy Sosa": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 9, Column: 1}}),
|
||||||
|
"Ken Griffey": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 10, Column: 1}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_26(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.26.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Mark McGwire": dyn.NewValue(65, []dyn.Location{{File: file, Line: 7, Column: 17}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Sammy Sosa": dyn.NewValue(63, []dyn.Location{{File: file, Line: 8, Column: 15}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 3}},
|
||||||
|
),
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Ken Griffey": dyn.NewValue(58, []dyn.Location{{File: file, Line: 9, Column: 16}}),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 9, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_27(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.27.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"invoice": dyn.NewValue(
|
||||||
|
34843,
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 10}},
|
||||||
|
),
|
||||||
|
"date": dyn.NewValue(
|
||||||
|
dyn.MustTime("2001-01-23"),
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 10}},
|
||||||
|
),
|
||||||
|
"bill-to": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"given": dyn.NewValue(
|
||||||
|
"Chris",
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 12}},
|
||||||
|
),
|
||||||
|
"family": dyn.NewValue(
|
||||||
|
"Dumars",
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 12}},
|
||||||
|
),
|
||||||
|
"address": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"lines": dyn.NewValue(
|
||||||
|
"458 Walkman Dr.\nSuite #292\n",
|
||||||
|
[]dyn.Location{{File: file, Line: 10, Column: 12}},
|
||||||
|
),
|
||||||
|
"city": dyn.NewValue(
|
||||||
|
"Royal Oak",
|
||||||
|
[]dyn.Location{{File: file, Line: 13, Column: 15}},
|
||||||
|
),
|
||||||
|
"state": dyn.NewValue(
|
||||||
|
"MI",
|
||||||
|
[]dyn.Location{{File: file, Line: 14, Column: 15}},
|
||||||
|
),
|
||||||
|
"postal": dyn.NewValue(
|
||||||
|
48046,
|
||||||
|
[]dyn.Location{{File: file, Line: 15, Column: 15}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 10, Column: 5}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 10}},
|
||||||
|
),
|
||||||
|
"ship-to": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"given": dyn.NewValue(
|
||||||
|
"Chris",
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 12}},
|
||||||
|
),
|
||||||
|
"family": dyn.NewValue(
|
||||||
|
"Dumars",
|
||||||
|
[]dyn.Location{{File: file, Line: 8, Column: 12}},
|
||||||
|
),
|
||||||
|
"address": dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"lines": dyn.NewValue(
|
||||||
|
"458 Walkman Dr.\nSuite #292\n",
|
||||||
|
[]dyn.Location{{File: file, Line: 10, Column: 12}},
|
||||||
|
),
|
||||||
|
"city": dyn.NewValue(
|
||||||
|
"Royal Oak",
|
||||||
|
[]dyn.Location{{File: file, Line: 13, Column: 15}},
|
||||||
|
),
|
||||||
|
"state": dyn.NewValue(
|
||||||
|
"MI",
|
||||||
|
[]dyn.Location{{File: file, Line: 14, Column: 15}},
|
||||||
|
),
|
||||||
|
"postal": dyn.NewValue(
|
||||||
|
48046,
|
||||||
|
[]dyn.Location{{File: file, Line: 15, Column: 15}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 10, Column: 5}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 6, Column: 10}},
|
||||||
|
),
|
||||||
|
"product": dyn.NewValue(
|
||||||
|
[]dyn.Value{
|
||||||
|
dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"sku": dyn.NewValue(
|
||||||
|
"BL394D",
|
||||||
|
[]dyn.Location{{File: file, Line: 18, Column: 17}},
|
||||||
|
),
|
||||||
|
"quantity": dyn.NewValue(
|
||||||
|
4,
|
||||||
|
[]dyn.Location{{File: file, Line: 19, Column: 17}},
|
||||||
|
),
|
||||||
|
"description": dyn.NewValue(
|
||||||
|
"Basketball",
|
||||||
|
[]dyn.Location{{File: file, Line: 20, Column: 17}},
|
||||||
|
),
|
||||||
|
"price": dyn.NewValue(
|
||||||
|
450.0,
|
||||||
|
[]dyn.Location{{File: file, Line: 21, Column: 17}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 18, Column: 3}},
|
||||||
|
), dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"sku": dyn.NewValue(
|
||||||
|
"BL4438H",
|
||||||
|
[]dyn.Location{{File: file, Line: 22, Column: 17}},
|
||||||
|
),
|
||||||
|
"quantity": dyn.NewValue(
|
||||||
|
1,
|
||||||
|
[]dyn.Location{{File: file, Line: 23, Column: 17}},
|
||||||
|
),
|
||||||
|
"description": dyn.NewValue(
|
||||||
|
"Super Hoop",
|
||||||
|
[]dyn.Location{{File: file, Line: 24, Column: 17}},
|
||||||
|
),
|
||||||
|
"price": dyn.NewValue(
|
||||||
|
2392.0,
|
||||||
|
[]dyn.Location{{File: file, Line: 25, Column: 17}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 22, Column: 3}},
|
||||||
|
)},
|
||||||
|
[]dyn.Location{{File: file, Line: 18, Column: 1}},
|
||||||
|
),
|
||||||
|
"tax": dyn.NewValue(
|
||||||
|
251.42,
|
||||||
|
[]dyn.Location{{File: file, Line: 26, Column: 8}},
|
||||||
|
),
|
||||||
|
"total": dyn.NewValue(
|
||||||
|
4443.52,
|
||||||
|
[]dyn.Location{{File: file, Line: 27, Column: 8}},
|
||||||
|
),
|
||||||
|
"comments": dyn.NewValue(
|
||||||
|
"Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.",
|
||||||
|
[]dyn.Location{{File: file, Line: 29, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 3, Column: 5}},
|
||||||
|
), self)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestYAMLSpecExample_2_28(t *testing.T) {
|
||||||
|
file := "testdata/spec_example_2.28.yml"
|
||||||
|
self := loadExample(t, file)
|
||||||
|
|
||||||
|
assert.Equal(t, dyn.NewValue(
|
||||||
|
map[string]dyn.Value{
|
||||||
|
"Time": dyn.NewValue(
|
||||||
|
"2001-11-23 15:01:42 -5",
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 7}},
|
||||||
|
),
|
||||||
|
"User": dyn.NewValue(
|
||||||
|
"ed",
|
||||||
|
[]dyn.Location{{File: file, Line: 5, Column: 7}},
|
||||||
|
),
|
||||||
|
"Warning": dyn.NewValue(
|
||||||
|
"This is an error message for the log file",
|
||||||
|
[]dyn.Location{{File: file, Line: 7, Column: 3}},
|
||||||
|
),
|
||||||
|
},
|
||||||
|
[]dyn.Location{{File: file, Line: 4, Column: 1}},
|
||||||
|
), self)
|
||||||
|
}
|
|
@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"os"
+	"reflect"
 
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/dyn/convert"
@@ -63,11 +64,24 @@ func (j *JsonFlag) Unmarshal(v any) diag.Diagnostics {
 		return diags.Extend(diag.FromErr(err))
 	}
 
-	// Finally unmarshal the normalized data to the output.
-	// It will fill in the ForceSendFields field if the struct contains it.
-	err = marshal.Unmarshal(data, v)
-	if err != nil {
-		return diags.Extend(diag.FromErr(err))
+	kind := reflect.ValueOf(v).Kind()
+	if kind == reflect.Ptr {
+		kind = reflect.ValueOf(v).Elem().Kind()
+	}
+
+	if kind == reflect.Struct {
+		// Finally unmarshal the normalized data to the output.
+		// It will fill in the ForceSendFields field if the struct contains it.
+		err = marshal.Unmarshal(data, v)
+		if err != nil {
+			return diags.Extend(diag.FromErr(err))
+		}
+	} else {
+		// If the output is not a struct, just unmarshal the data to the output.
+		err = json.Unmarshal(data, v)
+		if err != nil {
+			return diags.Extend(diag.FromErr(err))
+		}
 	}
 
 	return diags
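A standalone sketch (standard library only) of the kind check introduced above: dereference a pointer once, then branch on whether the target is a struct. The payload type and the helper name are illustrative only.

package main

import (
	"fmt"
	"reflect"
)

type payload struct{ Foo string }

func targetKind(v any) reflect.Kind {
	kind := reflect.ValueOf(v).Kind()
	if kind == reflect.Ptr {
		// Look through the pointer to the value it points at.
		kind = reflect.ValueOf(v).Elem().Kind()
	}
	return kind
}

func main() {
	var s payload
	var m any

	fmt.Println(targetKind(&s)) // struct    -> SDK-aware unmarshaler (ForceSendFields)
	fmt.Println(targetKind(&m)) // interface -> plain encoding/json
}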
@@ -13,10 +13,6 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-type requestType struct {
-	Foo string `json:"foo"`
-}
-
 func TestJsonFlagEmpty(t *testing.T) {
 	var body JsonFlag
 
@@ -35,13 +31,13 @@ func TestJsonFlagInline(t *testing.T) {
 	err := body.Set(`{"foo": "bar"}`)
 	assert.NoError(t, err)
 
-	var request requestType
+	var request any
 	diags := body.Unmarshal(&request)
 	assert.NoError(t, diags.Error())
 	assert.Empty(t, diags)
 
 	assert.Equal(t, "JSON (14 bytes)", body.String())
-	assert.Equal(t, requestType{"bar"}, request)
+	assert.Equal(t, map[string]any{"foo": "bar"}, request)
 }
 
 func TestJsonFlagError(t *testing.T) {
@@ -50,7 +46,7 @@ func TestJsonFlagError(t *testing.T) {
 	err := body.Set(`{"foo":`)
 	assert.NoError(t, err)
 
-	var request requestType
+	var request any
 	diags := body.Unmarshal(&request)
 	assert.EqualError(t, diags.Error(), "error decoding JSON at (inline):1:8: unexpected end of JSON input")
 	assert.Equal(t, "JSON (7 bytes)", body.String())
@@ -58,7 +54,7 @@ func TestJsonFlagError(t *testing.T) {
 
 func TestJsonFlagFile(t *testing.T) {
 	var body JsonFlag
-	var request requestType
+	var request any
 
 	var fpath string
 	var payload = []byte(`{"foo": "bar"}`)
@@ -78,7 +74,7 @@ func TestJsonFlagFile(t *testing.T) {
 	assert.NoError(t, diags.Error())
 	assert.Empty(t, diags)
 
-	assert.Equal(t, requestType{"bar"}, request)
+	assert.Equal(t, map[string]any{"foo": "bar"}, request)
 }
 
 const jsonData = `