Merge branch 'main' into feature/logout

Richard Nordström 2024-10-22 21:10:07 +02:00 committed by GitHub
commit fc23aa584d
96 changed files with 3479 additions and 465 deletions


@@ -1 +1 @@
cf9c61453990df0f9453670f2fe68e1b128647a2

.gitattributes

@@ -54,6 +54,7 @@ cmd/workspace/dashboards/dashboards.go linguist-generated=true
cmd/workspace/data-sources/data-sources.go linguist-generated=true
cmd/workspace/default-namespace/default-namespace.go linguist-generated=true
cmd/workspace/disable-legacy-access/disable-legacy-access.go linguist-generated=true
cmd/workspace/disable-legacy-dbfs/disable-legacy-dbfs.go linguist-generated=true
cmd/workspace/enhanced-security-monitoring/enhanced-security-monitoring.go linguist-generated=true
cmd/workspace/experiments/experiments.go linguist-generated=true
cmd/workspace/external-locations/external-locations.go linguist-generated=true


@@ -0,0 +1,65 @@
package mutator
import (
"context"
"net/url"
"strconv"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
)
type initializeURLs struct {
}
// InitializeURLs makes sure the URL field of each resource is configured.
// NOTE: since this depends on an extra API call, this mutator adds some extra
// latency. As such, it should only be used when needed.
// This URL field is used for the output of the 'bundle summary' CLI command.
func InitializeURLs() bundle.Mutator {
return &initializeURLs{}
}
func (m *initializeURLs) Name() string {
return "InitializeURLs"
}
func (m *initializeURLs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
workspaceId, err := b.WorkspaceClient().CurrentWorkspaceID(ctx)
if err != nil {
return diag.FromErr(err)
}
orgId := strconv.FormatInt(workspaceId, 10)
host := b.WorkspaceClient().Config.CanonicalHostName()
initializeForWorkspace(b, orgId, host)
return nil
}
func initializeForWorkspace(b *bundle.Bundle, orgId string, host string) error {
baseURL, err := url.Parse(host)
if err != nil {
return err
}
// Add ?o=<workspace id> only if <workspace id> wasn't in the subdomain already.
// The ?o= is needed when vanity URLs / legacy workspace URLs are used.
// If it's not needed we prefer to leave it out since these URLs are rather
// long for most terminals.
//
// See https://docs.databricks.com/en/workspace/workspace-details.html for
// further reading about the '?o=' suffix.
if !strings.Contains(baseURL.Hostname(), orgId) {
values := baseURL.Query()
values.Add("o", orgId)
baseURL.RawQuery = values.Encode()
}
for _, group := range b.Config.Resources.AllResources() {
for _, r := range group.Resources {
r.InitializeURL(*baseURL)
}
}
return nil
}
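// Illustrative sketch (not part of the change): the two URL shapes the
// ?o= branching above produces, using values from the tests that follow.
//
//	host = "https://mycompany.databricks.com/", orgId = "123456"
//	  -> https://mycompany.databricks.com/jobs/1?o=123456
//	host = "https://adb-123456.azuredatabricks.net/", orgId = "123456"
//	  -> https://adb-123456.azuredatabricks.net/jobs/1 (id already in the subdomain, so ?o= is omitted)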


@@ -0,0 +1,130 @@
package mutator
import (
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/ml"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/databricks/databricks-sdk-go/service/serving"
"github.com/stretchr/testify/require"
)
func TestInitializeURLs(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Workspace: config.Workspace{
Host: "https://mycompany.databricks.com/",
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
ID: "1",
JobSettings: &jobs.JobSettings{Name: "job1"},
},
},
Pipelines: map[string]*resources.Pipeline{
"pipeline1": {
ID: "3",
PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"},
},
},
Experiments: map[string]*resources.MlflowExperiment{
"experiment1": {
ID: "4",
Experiment: &ml.Experiment{Name: "experiment1"},
},
},
Models: map[string]*resources.MlflowModel{
"model1": {
ID: "a model uses its name for identifier",
Model: &ml.Model{Name: "a model uses its name for identifier"},
},
},
ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
"servingendpoint1": {
ID: "my_serving_endpoint",
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "my_serving_endpoint",
},
},
},
RegisteredModels: map[string]*resources.RegisteredModel{
"registeredmodel1": {
ID: "8",
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "my_registered_model",
},
},
},
QualityMonitors: map[string]*resources.QualityMonitor{
"qualityMonitor1": {
CreateMonitor: &catalog.CreateMonitor{
TableName: "catalog.schema.qualityMonitor1",
},
},
},
Schemas: map[string]*resources.Schema{
"schema1": {
ID: "catalog.schema",
CreateSchema: &catalog.CreateSchema{
Name: "schema",
},
},
},
Clusters: map[string]*resources.Cluster{
"cluster1": {
ID: "1017-103929-vlr7jzcf",
ClusterSpec: &compute.ClusterSpec{
ClusterName: "cluster1",
},
},
},
},
},
}
expectedURLs := map[string]string{
"job1": "https://mycompany.databricks.com/jobs/1?o=123456",
"pipeline1": "https://mycompany.databricks.com/pipelines/3?o=123456",
"experiment1": "https://mycompany.databricks.com/ml/experiments/4?o=123456",
"model1": "https://mycompany.databricks.com/ml/models/a%20model%20uses%20its%20name%20for%20identifier?o=123456",
"servingendpoint1": "https://mycompany.databricks.com/ml/endpoints/my_serving_endpoint?o=123456",
"registeredmodel1": "https://mycompany.databricks.com/explore/data/models/8?o=123456",
"qualityMonitor1": "https://mycompany.databricks.com/explore/data/catalog/schema/qualityMonitor1?o=123456",
"schema1": "https://mycompany.databricks.com/explore/data/catalog/schema?o=123456",
"cluster1": "https://mycompany.databricks.com/compute/clusters/1017-103929-vlr7jzcf?o=123456",
}
initializeForWorkspace(b, "123456", "https://mycompany.databricks.com/")
for _, group := range b.Config.Resources.AllResources() {
for key, r := range group.Resources {
require.Equal(t, expectedURLs[key], r.GetURL(), "Unexpected URL for "+key)
}
}
}
func TestInitializeURLsWithoutOrgId(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
ID: "1",
JobSettings: &jobs.JobSettings{Name: "job1"},
},
},
},
},
}
initializeForWorkspace(b, "123456", "https://adb-123456.azuredatabricks.net/")
require.Equal(t, "https://adb-123456.azuredatabricks.net/jobs/1", b.Config.Resources.Jobs["job1"].URL)
}


@@ -699,6 +699,9 @@ func TestTranslatePathJobEnvironments(t *testing.T) {
"../dist/env2.whl",
"simplejson",
"/Workspace/Users/foo@bar.com/test.whl",
"--extra-index-url https://name:token@gitlab.com/api/v4/projects/9876/packages/pypi/simple foobar",
"foobar --extra-index-url https://name:token@gitlab.com/api/v4/projects/9876/packages/pypi/simple",
"https://foo@bar.com/packages/pypi/simple",
},
},
},
@@ -719,6 +722,9 @@ func TestTranslatePathJobEnvironments(t *testing.T) {
assert.Equal(t, strings.Join([]string{".", "dist", "env2.whl"}, string(os.PathSeparator)), b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[1])
assert.Equal(t, "simplejson", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[2])
assert.Equal(t, "/Workspace/Users/foo@bar.com/test.whl", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[3])
assert.Equal(t, "--extra-index-url https://name:token@gitlab.com/api/v4/projects/9876/packages/pypi/simple foobar", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[4])
assert.Equal(t, "foobar --extra-index-url https://name:token@gitlab.com/api/v4/projects/9876/packages/pypi/simple", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[5])
assert.Equal(t, "https://foo@bar.com/packages/pypi/simple", b.Config.Resources.Jobs["job"].JobSettings.Environments[0].Spec.Dependencies[6])
}
func TestTranslatePathWithComplexVariables(t *testing.T) {


@@ -3,6 +3,7 @@ package config
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go"
@@ -30,6 +31,53 @@ type ConfigResource interface {
// Terraform equivalent name of the resource. For example "databricks_job"
// for jobs and "databricks_pipeline" for pipelines.
TerraformResourceName() string
// GetName returns the in-product name of the resource.
GetName() string
// GetURL returns the URL of the resource.
GetURL() string
// InitializeURL initializes the URL field of the resource.
InitializeURL(baseURL url.URL)
}
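// A minimal sketch of how a resource satisfies the new methods; the Dashboard
// type is hypothetical, but the pattern mirrors the concrete implementations
// later in this diff: skip when the ID is unset, then derive the URL from the
// base URL passed in by the InitializeURLs mutator.
//
//	func (d *Dashboard) InitializeURL(baseURL url.URL) {
//		if d.ID == "" {
//			return
//		}
//		baseURL.Path = fmt.Sprintf("dashboards/%s", d.ID)
//		d.URL = baseURL.String()
//	}
//
//	func (d *Dashboard) GetName() string { return d.DisplayName }
//	func (d *Dashboard) GetURL() string  { return d.URL }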
// ResourceGroup represents a group of resources of the same type.
// It includes a description of the resource type and a map of resources.
type ResourceGroup struct {
Description ResourceDescription
Resources map[string]ConfigResource
}
// collectResourceMap collects resources of a specific type into a ResourceGroup.
func collectResourceMap[T ConfigResource](
description ResourceDescription,
input map[string]T,
) ResourceGroup {
resources := make(map[string]ConfigResource)
for key, resource := range input {
resources[key] = resource
}
return ResourceGroup{
Description: description,
Resources: resources,
}
}
// AllResources returns all resources in the bundle grouped by their resource type.
func (r *Resources) AllResources() []ResourceGroup {
descriptions := SupportedResources()
return []ResourceGroup{
collectResourceMap(descriptions["jobs"], r.Jobs),
collectResourceMap(descriptions["pipelines"], r.Pipelines),
collectResourceMap(descriptions["models"], r.Models),
collectResourceMap(descriptions["experiments"], r.Experiments),
collectResourceMap(descriptions["model_serving_endpoints"], r.ModelServingEndpoints),
collectResourceMap(descriptions["registered_models"], r.RegisteredModels),
collectResourceMap(descriptions["quality_monitors"], r.QualityMonitors),
collectResourceMap(descriptions["schemas"], r.Schemas),
collectResourceMap(descriptions["clusters"], r.Clusters),
}
}
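// A sketch (assumed, not part of this change) of how a consumer such as the
// 'bundle summary' command might walk the grouped resources; printSummary is
// a hypothetical name.
func printSummary(r *Resources) {
	for _, group := range r.AllResources() {
		for key, resource := range group.Resources {
			// Description carries the display titles; GetURL is populated by
			// the InitializeURLs mutator.
			fmt.Printf("%s %s: %s\n", group.Description.SingularTitle, key, resource.GetURL())
		}
	}
}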
func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) {
@@ -61,20 +109,71 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
}
type ResourceDescription struct {
// Singular and plural name when used to refer to the configuration.
SingularName string
PluralName string
// Singular and plural title when used in summaries / terminal UI.
SingularTitle string
PluralTitle string
}
// The keys of the map correspond to the resource key in the bundle configuration.
func SupportedResources() map[string]ResourceDescription {
return map[string]ResourceDescription{
"jobs": {
SingularName: "job",
PluralName: "jobs",
SingularTitle: "Job",
PluralTitle: "Jobs",
},
"pipelines": {
SingularName: "pipeline",
PluralName: "pipelines",
SingularTitle: "Pipeline",
PluralTitle: "Pipelines",
},
"models": {
SingularName: "model",
PluralName: "models",
SingularTitle: "Model",
PluralTitle: "Models",
},
"experiments": {
SingularName: "experiment",
PluralName: "experiments",
SingularTitle: "Experiment",
PluralTitle: "Experiments",
},
"model_serving_endpoints": {
SingularName: "model_serving_endpoint",
PluralName: "model_serving_endpoints",
SingularTitle: "Model Serving Endpoint",
PluralTitle: "Model Serving Endpoints",
},
"registered_models": {
SingularName: "registered_model",
PluralName: "registered_models",
SingularTitle: "Registered Model",
PluralTitle: "Registered Models",
},
"quality_monitors": {
SingularName: "quality_monitor",
PluralName: "quality_monitors",
SingularTitle: "Quality Monitor",
PluralTitle: "Quality Monitors",
},
"schemas": {
SingularName: "schema",
PluralName: "schemas",
SingularTitle: "Schema",
PluralTitle: "Schemas",
},
"clusters": {
SingularName: "cluster",
PluralName: "clusters",
SingularTitle: "Cluster",
PluralTitle: "Clusters",
},
}
}
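// Illustrative lookup (not part of the change): descriptions are keyed by the
// configuration key, so callers can resolve display strings directly.
//
//	desc := SupportedResources()["jobs"]
//	_ = desc.SingularTitle // "Job"
//	_ = desc.PluralName    // "jobs"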


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@ -13,6 +15,7 @@ type Cluster struct {
ID string `json:"id,omitempty" bundle:"readonly"` ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"` Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*compute.ClusterSpec
}
@@ -37,3 +40,19 @@ func (s *Cluster) Exists(ctx context.Context, w *databricks.WorkspaceClient, id
func (s *Cluster) TerraformResourceName() string {
return "databricks_cluster"
}
func (s *Cluster) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("compute/clusters/%s", s.ID)
s.URL = baseURL.String()
}
func (s *Cluster) GetName() string {
return s.ClusterName
}
func (s *Cluster) GetURL() string {
return s.URL
}


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"strconv" "strconv"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
@@ -14,6 +16,7 @@ type Job struct {
ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*jobs.JobSettings
}
@@ -44,3 +47,19 @@ func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id stri
func (j *Job) TerraformResourceName() string {
return "databricks_job"
}
func (j *Job) InitializeURL(baseURL url.URL) {
if j.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("jobs/%s", j.ID)
j.URL = baseURL.String()
}
func (j *Job) GetName() string {
return j.Name
}
func (j *Job) GetURL() string {
return j.URL
}


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -13,6 +15,7 @@ type MlflowExperiment struct {
ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*ml.Experiment
}
@@ -39,3 +42,19 @@ func (s *MlflowExperiment) Exists(ctx context.Context, w *databricks.WorkspaceCl
func (s *MlflowExperiment) TerraformResourceName() string {
return "databricks_mlflow_experiment"
}
func (s *MlflowExperiment) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("ml/experiments/%s", s.ID)
s.URL = baseURL.String()
}
func (s *MlflowExperiment) GetName() string {
return s.Name
}
func (s *MlflowExperiment) GetURL() string {
return s.URL
}


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -13,6 +15,7 @@ type MlflowModel struct {
ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*ml.Model
}
@@ -39,3 +42,19 @@ func (s *MlflowModel) Exists(ctx context.Context, w *databricks.WorkspaceClient,
func (s *MlflowModel) TerraformResourceName() string {
return "databricks_mlflow_model"
}
func (s *MlflowModel) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("ml/models/%s", s.ID)
s.URL = baseURL.String()
}
func (s *MlflowModel) GetName() string {
return s.Name
}
func (s *MlflowModel) GetURL() string {
return s.URL
}


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -23,6 +25,7 @@ type ModelServingEndpoint struct {
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
}
func (s *ModelServingEndpoint) UnmarshalJSON(b []byte) error {
@@ -47,3 +50,19 @@ func (s *ModelServingEndpoint) Exists(ctx context.Context, w *databricks.Workspa
func (s *ModelServingEndpoint) TerraformResourceName() string {
return "databricks_model_serving"
}
func (s *ModelServingEndpoint) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("ml/endpoints/%s", s.ID)
s.URL = baseURL.String()
}
func (s *ModelServingEndpoint) GetName() string {
return s.Name
}
func (s *ModelServingEndpoint) GetURL() string {
return s.URL
}


@@ -2,6 +2,8 @@ package resources
import (
"context"
"fmt"
"net/url"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -13,6 +15,7 @@ type Pipeline struct {
ID string `json:"id,omitempty" bundle:"readonly"`
Permissions []Permission `json:"permissions,omitempty"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
*pipelines.PipelineSpec
}
@@ -39,3 +42,19 @@ func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id
func (p *Pipeline) TerraformResourceName() string {
return "databricks_pipeline"
}
func (p *Pipeline) InitializeURL(baseURL url.URL) {
if p.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("pipelines/%s", p.ID)
p.URL = baseURL.String()
}
func (p *Pipeline) GetName() string {
return p.Name
}
func (p *Pipeline) GetURL() string {
return p.URL
}


@@ -2,6 +2,9 @@ package resources
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -20,6 +23,7 @@ type QualityMonitor struct {
ID string `json:"id,omitempty" bundle:"readonly"`
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
}
func (s *QualityMonitor) UnmarshalJSON(b []byte) error {
@@ -44,3 +48,19 @@ func (s *QualityMonitor) Exists(ctx context.Context, w *databricks.WorkspaceClie
func (s *QualityMonitor) TerraformResourceName() string {
return "databricks_quality_monitor"
}
func (s *QualityMonitor) InitializeURL(baseURL url.URL) {
if s.TableName == "" {
return
}
baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.TableName, ".", "/"))
s.URL = baseURL.String()
}
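// e.g. TableName "catalog.schema.qualityMonitor1" yields
// .../explore/data/catalog/schema/qualityMonitor1 (see the test earlier in this diff).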
func (s *QualityMonitor) GetName() string {
return s.TableName
}
func (s *QualityMonitor) GetURL() string {
return s.URL
}


@@ -2,6 +2,9 @@ package resources
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
@@ -24,6 +27,7 @@ type RegisteredModel struct {
*catalog.CreateRegisteredModelRequest
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
}
func (s *RegisteredModel) UnmarshalJSON(b []byte) error {
@@ -48,3 +52,19 @@ func (s *RegisteredModel) Exists(ctx context.Context, w *databricks.WorkspaceCli
func (s *RegisteredModel) TerraformResourceName() string {
return "databricks_registered_model"
}
func (s *RegisteredModel) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("explore/data/models/%s", strings.ReplaceAll(s.ID, ".", "/"))
s.URL = baseURL.String()
}
func (s *RegisteredModel) GetName() string {
return s.Name
}
func (s *RegisteredModel) GetURL() string {
return s.URL
}


@@ -1,6 +1,12 @@
package resources
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/marshal"
"github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/catalog"
) )
@ -16,6 +22,31 @@ type Schema struct {
*catalog.CreateSchema *catalog.CreateSchema
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"` ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
}
func (s *Schema) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
return false, fmt.Errorf("schema.Exists() is not supported")
}
func (s *Schema) TerraformResourceName() string {
return "databricks_schema"
}
func (s *Schema) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.ID, ".", "/"))
s.URL = baseURL.String()
}
func (s *Schema) GetURL() string {
return s.URL
}
func (s *Schema) GetName() string {
return s.Name
}
func (s *Schema) UnmarshalJSON(b []byte) error {


@@ -63,17 +63,37 @@ func TestCustomMarshallerIsImplemented(t *testing.T) {
}
}
func TestResourcesAllResourcesCompleteness(t *testing.T) {
r := Resources{}
rt := reflect.TypeOf(r)
// Collect the set of resource types included in AllResources.
var types []string
for _, group := range r.AllResources() {
types = append(types, group.Description.PluralName)
}
for i := 0; i < rt.NumField(); i++ {
field := rt.Field(i)
jsonTag := field.Tag.Get("json")
if idx := strings.Index(jsonTag, ","); idx != -1 {
jsonTag = jsonTag[:idx]
}
assert.Contains(t, types, jsonTag, "Field %s is missing in AllResources", field.Name)
}
}
func TestSupportedResources(t *testing.T) {
// Please add your resource to the SupportedResources() function in resources.go if you add a new resource.
actual := SupportedResources()
typ := reflect.TypeOf(Resources{})
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
jsonTags := strings.Split(field.Tag.Get("json"), ",")
pluralName := jsonTags[0]
assert.Equal(t, actual[pluralName].PluralName, pluralName)
}
}


@@ -2,9 +2,7 @@ package terraform
import (
"context"
"encoding/json"
"fmt" "fmt"
"sort"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/resources"
@@ -14,244 +12,6 @@ import (
tfjson "github.com/hashicorp/terraform-json"
)
func conv(from any, to any) {
buf, _ := json.Marshal(from)
json.Unmarshal(buf, &to)
}
func convPermissions(acl []resources.Permission) *schema.ResourcePermissions {
if len(acl) == 0 {
return nil
}
resource := schema.ResourcePermissions{}
for _, ac := range acl {
resource.AccessControl = append(resource.AccessControl, convPermission(ac))
}
return &resource
}
func convPermission(ac resources.Permission) schema.ResourcePermissionsAccessControl {
dst := schema.ResourcePermissionsAccessControl{
PermissionLevel: ac.Level,
}
if ac.UserName != "" {
dst.UserName = ac.UserName
}
if ac.GroupName != "" {
dst.GroupName = ac.GroupName
}
if ac.ServicePrincipalName != "" {
dst.ServicePrincipalName = ac.ServicePrincipalName
}
return dst
}
func convGrants(acl []resources.Grant) *schema.ResourceGrants {
if len(acl) == 0 {
return nil
}
resource := schema.ResourceGrants{}
for _, ac := range acl {
resource.Grant = append(resource.Grant, schema.ResourceGrantsGrant{
Privileges: ac.Privileges,
Principal: ac.Principal,
})
}
return &resource
}
// BundleToTerraform converts resources in a bundle configuration
// to the equivalent Terraform JSON representation.
//
// Note: This function is an older implementation of the conversion logic. It is
// no longer used in any code paths. It is kept around to be used in tests.
// New resources do not need to modify this function and can instead define
// the conversion logic in the tfdyn package.
func BundleToTerraform(config *config.Root) *schema.Root {
tfroot := schema.NewRoot()
tfroot.Provider = schema.NewProviders()
tfroot.Resource = schema.NewResources()
noResources := true
for k, src := range config.Resources.Jobs {
noResources = false
var dst schema.ResourceJob
conv(src, &dst)
if src.JobSettings != nil {
sort.Slice(src.JobSettings.Tasks, func(i, j int) bool {
return src.JobSettings.Tasks[i].TaskKey < src.JobSettings.Tasks[j].TaskKey
})
for _, v := range src.Tasks {
var t schema.ResourceJobTask
conv(v, &t)
for _, v_ := range v.Libraries {
var l schema.ResourceJobTaskLibrary
conv(v_, &l)
t.Library = append(t.Library, l)
}
// Convert for_each_task libraries
if v.ForEachTask != nil {
for _, v_ := range v.ForEachTask.Task.Libraries {
var l schema.ResourceJobTaskForEachTaskTaskLibrary
conv(v_, &l)
t.ForEachTask.Task.Library = append(t.ForEachTask.Task.Library, l)
}
}
dst.Task = append(dst.Task, t)
}
for _, v := range src.JobClusters {
var t schema.ResourceJobJobCluster
conv(v, &t)
dst.JobCluster = append(dst.JobCluster, t)
}
// Unblock downstream work. To be addressed more generally later.
if git := src.GitSource; git != nil {
dst.GitSource = &schema.ResourceJobGitSource{
Url: git.GitUrl,
Branch: git.GitBranch,
Commit: git.GitCommit,
Provider: string(git.GitProvider),
Tag: git.GitTag,
}
}
for _, v := range src.Parameters {
var t schema.ResourceJobParameter
conv(v, &t)
dst.Parameter = append(dst.Parameter, t)
}
}
tfroot.Resource.Job[k] = &dst
// Configure permissions for this resource.
if rp := convPermissions(src.Permissions); rp != nil {
rp.JobId = fmt.Sprintf("${databricks_job.%s.id}", k)
tfroot.Resource.Permissions["job_"+k] = rp
}
}
for k, src := range config.Resources.Pipelines {
noResources = false
var dst schema.ResourcePipeline
conv(src, &dst)
if src.PipelineSpec != nil {
for _, v := range src.Libraries {
var l schema.ResourcePipelineLibrary
conv(v, &l)
dst.Library = append(dst.Library, l)
}
for _, v := range src.Clusters {
var l schema.ResourcePipelineCluster
conv(v, &l)
dst.Cluster = append(dst.Cluster, l)
}
for _, v := range src.Notifications {
var l schema.ResourcePipelineNotification
conv(v, &l)
dst.Notification = append(dst.Notification, l)
}
}
tfroot.Resource.Pipeline[k] = &dst
// Configure permissions for this resource.
if rp := convPermissions(src.Permissions); rp != nil {
rp.PipelineId = fmt.Sprintf("${databricks_pipeline.%s.id}", k)
tfroot.Resource.Permissions["pipeline_"+k] = rp
}
}
for k, src := range config.Resources.Models {
noResources = false
var dst schema.ResourceMlflowModel
conv(src, &dst)
tfroot.Resource.MlflowModel[k] = &dst
// Configure permissions for this resource.
if rp := convPermissions(src.Permissions); rp != nil {
rp.RegisteredModelId = fmt.Sprintf("${databricks_mlflow_model.%s.registered_model_id}", k)
tfroot.Resource.Permissions["mlflow_model_"+k] = rp
}
}
for k, src := range config.Resources.Experiments {
noResources = false
var dst schema.ResourceMlflowExperiment
conv(src, &dst)
tfroot.Resource.MlflowExperiment[k] = &dst
// Configure permissions for this resource.
if rp := convPermissions(src.Permissions); rp != nil {
rp.ExperimentId = fmt.Sprintf("${databricks_mlflow_experiment.%s.id}", k)
tfroot.Resource.Permissions["mlflow_experiment_"+k] = rp
}
}
for k, src := range config.Resources.ModelServingEndpoints {
noResources = false
var dst schema.ResourceModelServing
conv(src, &dst)
tfroot.Resource.ModelServing[k] = &dst
// Configure permissions for this resource.
if rp := convPermissions(src.Permissions); rp != nil {
rp.ServingEndpointId = fmt.Sprintf("${databricks_model_serving.%s.serving_endpoint_id}", k)
tfroot.Resource.Permissions["model_serving_"+k] = rp
}
}
for k, src := range config.Resources.RegisteredModels {
noResources = false
var dst schema.ResourceRegisteredModel
conv(src, &dst)
tfroot.Resource.RegisteredModel[k] = &dst
// Configure permissions for this resource.
if rp := convGrants(src.Grants); rp != nil {
rp.Function = fmt.Sprintf("${databricks_registered_model.%s.id}", k)
tfroot.Resource.Grants["registered_model_"+k] = rp
}
}
for k, src := range config.Resources.QualityMonitors {
noResources = false
var dst schema.ResourceQualityMonitor
conv(src, &dst)
tfroot.Resource.QualityMonitor[k] = &dst
}
for k, src := range config.Resources.Clusters {
noResources = false
var dst schema.ResourceCluster
conv(src, &dst)
tfroot.Resource.Cluster[k] = &dst
}
// We explicitly set "resource" to nil to omit it from a JSON encoding.
// This is required because the terraform CLI requires >= 1 resources defined
// if the "resource" property is used in a .tf.json file.
if noResources {
tfroot.Resource = nil
}
return tfroot
}
// BundleToTerraformWithDynValue converts resources in a bundle configuration
// to the equivalent Terraform JSON representation.
func BundleToTerraformWithDynValue(ctx context.Context, root dyn.Value) (*schema.Root, error) {


@@ -2,7 +2,6 @@ package terraform
import (
"context"
"encoding/json"
"reflect" "reflect"
"testing" "testing"
@ -21,6 +20,27 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func produceTerraformConfiguration(t *testing.T, config config.Root) *schema.Root {
vin, err := convert.FromTyped(config, dyn.NilValue)
require.NoError(t, err)
out, err := BundleToTerraformWithDynValue(context.Background(), vin)
require.NoError(t, err)
return out
}
func convertToResourceStruct[T any](t *testing.T, resource *T, data any) {
require.NotNil(t, resource)
require.NotNil(t, data)
// Convert data to a dyn.Value.
vin, err := convert.FromTyped(data, dyn.NilValue)
require.NoError(t, err)
// Convert the dyn.Value to a struct.
err = convert.ToTyped(resource, vin)
require.NoError(t, err)
}
func TestBundleToTerraformJob(t *testing.T) {
var src = resources.Job{
JobSettings: &jobs.JobSettings{
@@ -58,8 +78,9 @@ func TestBundleToTerraformJob(t *testing.T) {
},
}
var resource schema.ResourceJob
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Job["my_job"])
assert.Equal(t, "my job", resource.Name)
assert.Len(t, resource.JobCluster, 1)
@@ -68,8 +89,6 @@ func TestBundleToTerraformJob(t *testing.T) {
assert.Equal(t, "param1", resource.Parameter[0].Name)
assert.Equal(t, "param2", resource.Parameter[1].Name)
assert.Nil(t, out.Data)
}
func TestBundleToTerraformJobPermissions(t *testing.T) {
@@ -90,15 +109,14 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
},
}
var resource schema.ResourcePermissions
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Permissions["job_my_job"])
assert.NotEmpty(t, resource.JobId)
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
}
func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
@@ -128,15 +146,14 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
},
}
var resource schema.ResourceJob
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Job["my_job"])
assert.Equal(t, "my job", resource.Name)
require.Len(t, resource.Task, 1)
require.Len(t, resource.Task[0].Library, 1)
assert.Equal(t, "mlflow", resource.Task[0].Library[0].Pypi.Package)
}
func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
@@ -172,15 +189,14 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
},
}
var resource schema.ResourceJob
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Job["my_job"])
assert.Equal(t, "my job", resource.Name)
require.Len(t, resource.Task, 1)
require.Len(t, resource.Task[0].ForEachTask.Task.Library, 1)
assert.Equal(t, "mlflow", resource.Task[0].ForEachTask.Task.Library[0].Pypi.Package)
}
func TestBundleToTerraformPipeline(t *testing.T) {
@@ -230,8 +246,9 @@ func TestBundleToTerraformPipeline(t *testing.T) {
},
}
var resource schema.ResourcePipeline
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Pipeline["my_pipeline"])
assert.Equal(t, "my pipeline", resource.Name)
assert.Len(t, resource.Library, 2)
@@ -241,8 +258,6 @@ func TestBundleToTerraformPipeline(t *testing.T) {
assert.Equal(t, resource.Notification[1].Alerts, []string{"on-update-failure", "on-flow-failure"})
assert.Equal(t, resource.Notification[1].EmailRecipients, []string{"jane@doe.com", "john@doe.com"})
assert.Nil(t, out.Data)
}
func TestBundleToTerraformPipelinePermissions(t *testing.T) {
@@ -263,15 +278,14 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
},
}
var resource schema.ResourcePermissions
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Permissions["pipeline_my_pipeline"])
assert.NotEmpty(t, resource.PipelineId)
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
}
func TestBundleToTerraformModel(t *testing.T) {
@@ -300,8 +314,9 @@ func TestBundleToTerraformModel(t *testing.T) {
},
}
var resource schema.ResourceMlflowModel
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.MlflowModel["my_model"])
assert.Equal(t, "name", resource.Name)
assert.Equal(t, "description", resource.Description)
@@ -311,8 +326,6 @@ func TestBundleToTerraformModel(t *testing.T) {
assert.Equal(t, "k2", resource.Tags[1].Key)
assert.Equal(t, "v2", resource.Tags[1].Value)
assert.Nil(t, out.Data)
}
func TestBundleToTerraformModelPermissions(t *testing.T) {
@@ -336,15 +349,14 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
},
}
var resource schema.ResourcePermissions
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Permissions["mlflow_model_my_model"])
assert.NotEmpty(t, resource.RegisteredModelId)
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_READ", resource.AccessControl[0].PermissionLevel)
}
func TestBundleToTerraformExperiment(t *testing.T) {
@@ -362,13 +374,12 @@ func TestBundleToTerraformExperiment(t *testing.T) {
},
}
var resource schema.ResourceMlflowExperiment
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.MlflowExperiment["my_experiment"])
assert.Equal(t, "name", resource.Name)
assert.Nil(t, out.Data)
}
func TestBundleToTerraformExperimentPermissions(t *testing.T) {
@@ -392,15 +403,14 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
},
}
var resource schema.ResourcePermissions
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Permissions["mlflow_experiment_my_experiment"])
assert.NotEmpty(t, resource.ExperimentId)
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_READ", resource.AccessControl[0].PermissionLevel)
}
func TestBundleToTerraformModelServing(t *testing.T) {
@@ -436,8 +446,9 @@ func TestBundleToTerraformModelServing(t *testing.T) {
},
}
var resource schema.ResourceModelServing
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.ModelServing["my_model_serving_endpoint"])
assert.Equal(t, "name", resource.Name)
assert.Equal(t, "model_name", resource.Config.ServedModels[0].ModelName)
@@ -447,8 +458,6 @@ func TestBundleToTerraformModelServing(t *testing.T) {
assert.Equal(t, "model_name-1", resource.Config.TrafficConfig.Routes[0].ServedModelName)
assert.Equal(t, 100, resource.Config.TrafficConfig.Routes[0].TrafficPercentage)
assert.Nil(t, out.Data)
}
func TestBundleToTerraformModelServingPermissions(t *testing.T) {
@@ -490,15 +499,14 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
},
}
var resource schema.ResourcePermissions
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Permissions["model_serving_my_model_serving_endpoint"])
assert.NotEmpty(t, resource.ServingEndpointId)
assert.Len(t, resource.AccessControl, 1)
assert.Equal(t, "jane@doe.com", resource.AccessControl[0].UserName)
assert.Equal(t, "CAN_VIEW", resource.AccessControl[0].PermissionLevel)
}
func TestBundleToTerraformRegisteredModel(t *testing.T) {
@@ -519,16 +527,15 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
},
}
var resource schema.ResourceRegisteredModel
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.RegisteredModel["my_registered_model"])
assert.Equal(t, "name", resource.Name)
assert.Equal(t, "catalog", resource.CatalogName)
assert.Equal(t, "schema", resource.SchemaName)
assert.Equal(t, "comment", resource.Comment)
assert.Nil(t, out.Data)
}
func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
@@ -554,15 +561,14 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
},
}
var resource schema.ResourceGrants
out := produceTerraformConfiguration(t, config)
convertToResourceStruct(t, &resource, out.Resource.Grants["registered_model_my_registered_model"])
assert.NotEmpty(t, resource.Function)
assert.Len(t, resource.Grant, 1)
assert.Equal(t, "jane@doe.com", resource.Grant[0].Principal)
assert.Equal(t, "EXECUTE", resource.Grant[0].Privileges[0])
}
func TestBundleToTerraformDeletedResources(t *testing.T) {
@@ -1154,25 +1160,3 @@ func AssertFullResourceCoverage(t *testing.T, config *config.Root) {
}
}
}
func assertEqualTerraformRoot(t *testing.T, a, b *schema.Root) {
ba, err := json.Marshal(a)
require.NoError(t, err)
bb, err := json.Marshal(b)
require.NoError(t, err)
assert.JSONEq(t, string(ba), string(bb))
}
func bundleToTerraformEquivalenceTest(t *testing.T, config *config.Root) {
t.Run("dyn equivalence", func(t *testing.T) {
tf1 := BundleToTerraform(config)
vin, err := convert.FromTyped(config, dyn.NilValue)
require.NoError(t, err)
tf2, err := BundleToTerraformWithDynValue(context.Background(), vin)
require.NoError(t, err)
// Compare roots
assertEqualTerraformRoot(t, tf1, tf2)
})
}


@@ -1,3 +1,3 @@
package schema
const ProviderVersion = "1.54.0"


@@ -0,0 +1,15 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceNotificationDestinationsNotificationDestinations struct {
DestinationType string `json:"destination_type,omitempty"`
DisplayName string `json:"display_name,omitempty"`
Id string `json:"id,omitempty"`
}
type DataSourceNotificationDestinations struct {
DisplayNameContains string `json:"display_name_contains,omitempty"`
Type string `json:"type,omitempty"`
NotificationDestinations []DataSourceNotificationDestinationsNotificationDestinations `json:"notification_destinations,omitempty"`
}


@@ -0,0 +1,32 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceRegisteredModelModelInfoAliases struct {
AliasName string `json:"alias_name,omitempty"`
VersionNum int `json:"version_num,omitempty"`
}
type DataSourceRegisteredModelModelInfo struct {
BrowseOnly bool `json:"browse_only,omitempty"`
CatalogName string `json:"catalog_name,omitempty"`
Comment string `json:"comment,omitempty"`
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
FullName string `json:"full_name,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name,omitempty"`
Owner string `json:"owner,omitempty"`
SchemaName string `json:"schema_name,omitempty"`
StorageLocation string `json:"storage_location,omitempty"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
Aliases []DataSourceRegisteredModelModelInfoAliases `json:"aliases,omitempty"`
}
type DataSourceRegisteredModel struct {
FullName string `json:"full_name"`
IncludeAliases bool `json:"include_aliases,omitempty"`
IncludeBrowse bool `json:"include_browse,omitempty"`
ModelInfo []DataSourceRegisteredModelModelInfo `json:"model_info,omitempty"`
}


@@ -36,7 +36,9 @@ type DataSources struct {
NodeType map[string]any `json:"databricks_node_type,omitempty"`
Notebook map[string]any `json:"databricks_notebook,omitempty"`
NotebookPaths map[string]any `json:"databricks_notebook_paths,omitempty"`
NotificationDestinations map[string]any `json:"databricks_notification_destinations,omitempty"`
Pipelines map[string]any `json:"databricks_pipelines,omitempty"`
RegisteredModel map[string]any `json:"databricks_registered_model,omitempty"`
Schema map[string]any `json:"databricks_schema,omitempty"`
Schemas map[string]any `json:"databricks_schemas,omitempty"`
ServicePrincipal map[string]any `json:"databricks_service_principal,omitempty"`
@@ -92,7 +94,9 @@ func NewDataSources() *DataSources {
NodeType: make(map[string]any),
Notebook: make(map[string]any),
NotebookPaths: make(map[string]any),
NotificationDestinations: make(map[string]any),
Pipelines: make(map[string]any),
RegisteredModel: make(map[string]any),
Schema: make(map[string]any),
Schemas: make(map[string]any),
ServicePrincipal: make(map[string]any),


@@ -1448,6 +1448,7 @@ type ResourceJobWebhookNotifications struct {
type ResourceJob struct {
AlwaysRunning bool `json:"always_running,omitempty"`
BudgetPolicyId string `json:"budget_policy_id,omitempty"`
ControlRunState bool `json:"control_run_state,omitempty"`
Description string `json:"description,omitempty"`
EditMode string `json:"edit_mode,omitempty"`


@@ -19,9 +19,10 @@ type ResourceOnlineTableSpec struct {
}
type ResourceOnlineTable struct {
Id string `json:"id,omitempty"`
Name string `json:"name"`
Status []any `json:"status,omitempty"`
TableServingUrl string `json:"table_serving_url,omitempty"`
UnityCatalogProvisioningState string `json:"unity_catalog_provisioning_state,omitempty"`
Spec *ResourceOnlineTableSpec `json:"spec,omitempty"`
}


@@ -142,10 +142,26 @@ type ResourcePipelineGatewayDefinition struct {
GatewayStorageSchema string `json:"gateway_storage_schema,omitempty"`
}
type ResourcePipelineIngestionDefinitionObjectsReportTableConfiguration struct {
PrimaryKeys []string `json:"primary_keys,omitempty"`
SalesforceIncludeFormulaFields bool `json:"salesforce_include_formula_fields,omitempty"`
ScdType string `json:"scd_type,omitempty"`
SequenceBy []string `json:"sequence_by,omitempty"`
}
type ResourcePipelineIngestionDefinitionObjectsReport struct {
DestinationCatalog string `json:"destination_catalog,omitempty"`
DestinationSchema string `json:"destination_schema,omitempty"`
DestinationTable string `json:"destination_table,omitempty"`
SourceUrl string `json:"source_url,omitempty"`
TableConfiguration *ResourcePipelineIngestionDefinitionObjectsReportTableConfiguration `json:"table_configuration,omitempty"`
}
type ResourcePipelineIngestionDefinitionObjectsSchemaTableConfiguration struct {
PrimaryKeys []string `json:"primary_keys,omitempty"`
SalesforceIncludeFormulaFields bool `json:"salesforce_include_formula_fields,omitempty"`
ScdType string `json:"scd_type,omitempty"`
SequenceBy []string `json:"sequence_by,omitempty"`
}
type ResourcePipelineIngestionDefinitionObjectsSchema struct {
@@ -160,6 +176,7 @@ type ResourcePipelineIngestionDefinitionObjectsTableTableConfiguration struct {
PrimaryKeys []string `json:"primary_keys,omitempty"`
SalesforceIncludeFormulaFields bool `json:"salesforce_include_formula_fields,omitempty"`
ScdType string `json:"scd_type,omitempty"`
SequenceBy []string `json:"sequence_by,omitempty"`
}
type ResourcePipelineIngestionDefinitionObjectsTable struct { type ResourcePipelineIngestionDefinitionObjectsTable struct {
@ -173,6 +190,7 @@ type ResourcePipelineIngestionDefinitionObjectsTable struct {
} }
type ResourcePipelineIngestionDefinitionObjects struct { type ResourcePipelineIngestionDefinitionObjects struct {
Report *ResourcePipelineIngestionDefinitionObjectsReport `json:"report,omitempty"`
Schema *ResourcePipelineIngestionDefinitionObjectsSchema `json:"schema,omitempty"` Schema *ResourcePipelineIngestionDefinitionObjectsSchema `json:"schema,omitempty"`
Table *ResourcePipelineIngestionDefinitionObjectsTable `json:"table,omitempty"` Table *ResourcePipelineIngestionDefinitionObjectsTable `json:"table,omitempty"`
} }
@ -181,6 +199,7 @@ type ResourcePipelineIngestionDefinitionTableConfiguration struct {
PrimaryKeys []string `json:"primary_keys,omitempty"` PrimaryKeys []string `json:"primary_keys,omitempty"`
SalesforceIncludeFormulaFields bool `json:"salesforce_include_formula_fields,omitempty"` SalesforceIncludeFormulaFields bool `json:"salesforce_include_formula_fields,omitempty"`
ScdType string `json:"scd_type,omitempty"` ScdType string `json:"scd_type,omitempty"`
SequenceBy []string `json:"sequence_by,omitempty"`
} }
type ResourcePipelineIngestionDefinition struct { type ResourcePipelineIngestionDefinition struct {


@@ -21,7 +21,7 @@ type Root struct {
const ProviderHost = "registry.terraform.io"
const ProviderSource = "databricks/databricks"
-const ProviderVersion = "1.53.0"
const ProviderVersion = "1.54.0"

func NewRoot() *Root {
	return &Root{


@@ -57,6 +57,12 @@ func IsLibraryLocal(dep string) bool {
		}
	}

	// If the dependency starts with --, it's a pip flag option which is a valid
	// entry for environment dependencies but not a local path
	if containsPipFlag(dep) {
		return false
	}

	// If the dependency is a requirements file, it's not a valid local path
	if strings.HasPrefix(dep, "-r") {
		return false
@@ -70,6 +76,11 @@ func IsLibraryLocal(dep string) bool {
	return IsLocalPath(dep)
}

func containsPipFlag(input string) bool {
	re := regexp.MustCompile(`--[a-zA-Z0-9-]+`)
	return re.MatchString(input)
}

// ^[a-zA-Z0-9\-_]+: Matches the package name, allowing alphanumeric characters, dashes (-), and underscores (_).
// (\[.*\])?: Optionally matches any extras specified in square brackets, e.g., [security].
// ((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?): Optionally matches version specifiers, supporting various operators (==, !=, etc.) followed by a version number (e.g., 2.25.1).
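As a quick illustration of how these two patterns classify dependency strings, here is a small self-contained sketch (not part of the CLI; the sample entries are made up):

```go
package main

import (
	"fmt"
	"regexp"
)

var (
	// Same pip-flag pattern as containsPipFlag above.
	pipFlag = regexp.MustCompile(`--[a-zA-Z0-9-]+`)
	// The PyPI package pattern described by the comments above.
	pypiPackage = regexp.MustCompile(`^[a-zA-Z0-9\-_]+(\[.*\])?((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?)?$`)
)

func main() {
	for _, dep := range []string{
		"--extra-index-url https://example.com", // pip flag: not a local path
		"-r requirements.txt",                   // requirements file: not a local path
		"requests[security]==2.25.1",            // PyPI spec: not a local path
		"./dist/my_lib-0.1-py3-none-any.whl",    // none of the above: treated as a local path
	} {
		fmt.Printf("%-40s pipFlag=%-5v pypiPackage=%v\n",
			dep, pipFlag.MatchString(dep), pypiPackage.MatchString(dep))
	}
}
```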


@@ -0,0 +1,56 @@
package permissions
import (
"context"
"fmt"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
)
type validateSharedRootPermissions struct {
}
func ValidateSharedRootPermissions() bundle.Mutator {
return &validateSharedRootPermissions{}
}
func (*validateSharedRootPermissions) Name() string {
return "ValidateSharedRootPermissions"
}
func (*validateSharedRootPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
if isWorkspaceSharedRoot(b.Config.Workspace.RootPath) {
return isUsersGroupPermissionSet(b)
}
return nil
}
func isWorkspaceSharedRoot(path string) bool {
return strings.HasPrefix(path, "/Workspace/Shared/")
}
// isUsersGroupPermissionSet checks that top-level permissions set for bundle contain group_name: users with CAN_MANAGE permission.
func isUsersGroupPermissionSet(b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
allUsers := false
for _, p := range b.Config.Permissions {
if p.GroupName == "users" && p.Level == CAN_MANAGE {
allUsers = true
break
}
}
if !allUsers {
diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning,
Summary: fmt.Sprintf("the bundle root path %s is writable by all workspace users", b.Config.Workspace.RootPath),
Detail: "The bundle is configured to use /Workspace/Shared, which will give read/write access to all users. If this is intentional, add CAN_MANAGE for 'group_name: users' permission to your bundle configuration. If the deployment should be restricted, move it to a restricted folder such as /Workspace/Users/<username or principal name>.",
})
}
return diags
}
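For reference, a hedged sketch of a bundle that passes this check, mirroring the passing test below: the shared root is acknowledged by granting CAN_MANAGE to the `users` group.

```go
// Sketch only; the types and mutator are the ones defined in this package and its imports.
b := &bundle.Bundle{
	Config: config.Root{
		Workspace:   config.Workspace{RootPath: "/Workspace/Shared/foo/bar"},
		Permissions: []resources.Permission{{Level: CAN_MANAGE, GroupName: "users"}},
	},
}
// No warning is emitted: group "users" holds CAN_MANAGE on the shared root.
diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions())
_ = diags
```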


@@ -0,0 +1,66 @@
package permissions
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/require"
)
func TestValidateSharedRootPermissionsForShared(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Workspace: config.Workspace{
RootPath: "/Workspace/Shared/foo/bar",
},
Permissions: []resources.Permission{
{Level: CAN_MANAGE, GroupName: "users"},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job_1": {JobSettings: &jobs.JobSettings{Name: "job_1"}},
"job_2": {JobSettings: &jobs.JobSettings{Name: "job_2"}},
},
},
},
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions()))
require.Empty(t, diags)
}
func TestValidateSharedRootPermissionsForSharedError(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Workspace: config.Workspace{
RootPath: "/Workspace/Shared/foo/bar",
},
Permissions: []resources.Permission{
{Level: CAN_MANAGE, UserName: "foo@bar.com"},
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job_1": {JobSettings: &jobs.JobSettings{Name: "job_1"}},
"job_2": {JobSettings: &jobs.JobSettings{Name: "job_2"}},
},
},
},
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions()))
require.Len(t, diags, 1)
require.Equal(t, "the bundle root path /Workspace/Shared/foo/bar is writable by all workspace users", diags[0].Summary)
require.Equal(t, diag.Warning, diags[0].Severity)
}


@@ -16,6 +16,10 @@ func ApplyWorkspaceRootPermissions() bundle.Mutator {
	return &workspaceRootPermissions{}
}

func (*workspaceRootPermissions) Name() string {
	return "ApplyWorkspaceRootPermissions"
}

// Apply implements bundle.Mutator.
func (*workspaceRootPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	err := giveAccessForWorkspaceRoot(ctx, b)
@@ -26,10 +30,6 @@ func (*workspaceRootPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	return nil
}

-func (*workspaceRootPermissions) Name() string {
-	return "ApplyWorkspaceRootPermissions"
-}

func giveAccessForWorkspaceRoot(ctx context.Context, b *bundle.Bundle) error {
	permissions := make([]workspace.WorkspaceObjectAccessControlRequest, 0)


@@ -69,6 +69,6 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) {
		WorkspaceObjectType: "directories",
	}).Return(nil, nil)

-	diags := bundle.Apply(context.Background(), b, ApplyWorkspaceRootPermissions())
-	require.NoError(t, diags.Error())
	diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions()))
	require.Empty(t, diags)
}


@@ -76,8 +76,11 @@ func Initialize() bundle.Mutator {
	mutator.TranslatePaths(),
	trampoline.WrapperWarning(),
	permissions.ValidateSharedRootPermissions(),
	permissions.ApplyBundlePermissions(),
	permissions.FilterCurrentUser(),
	metadata.AnnotateJobs(),
	metadata.AnnotatePipelines(),
	terraform.Initialize(),


@@ -1,9 +1,11 @@
package render

import (
	"context"
	"fmt"
	"io"
	"path/filepath"
	"sort"
	"strings"
	"text/template"
@@ -29,7 +31,7 @@ var renderFuncMap = template.FuncMap{
	},
}

-const summaryTemplate = `{{- if .Name -}}
const summaryHeaderTemplate = `{{- if .Name -}}
Name: {{ .Name | bold }}
{{- if .Target }}
Target: {{ .Target | bold }}
@@ -46,12 +48,30 @@ Workspace:
  Path: {{ .Path | bold }}
{{- end }}
{{- end }}
-{{ end -}}
-
-{{ .Trailer }}
{{ end -}}`

const resourcesTemplate = `Resources:
{{- range . }}
  {{ .GroupName }}:
  {{- range .Resources }}
    {{ .Key | bold }}:
      Name: {{ .Name }}
      URL: {{ if .URL }}{{ .URL | cyan }}{{ else }}{{ "(not deployed)" | cyan }}{{ end }}
  {{- end }}
{{- end }}
`
type ResourceGroup struct {
GroupName string
Resources []ResourceInfo
}
type ResourceInfo struct {
Key string
Name string
URL string
}
func pluralize(n int, singular, plural string) string {
	if n == 1 {
		return fmt.Sprintf("%d %s", n, singular)
@@ -74,20 +94,20 @@ func buildTrailer(diags diag.Diagnostics) string {
	case len(parts) >= 3:
		first := strings.Join(parts[:len(parts)-1], ", ")
		last := parts[len(parts)-1]
-		return fmt.Sprintf("Found %s, and %s", first, last)
		return fmt.Sprintf("Found %s, and %s\n", first, last)
	case len(parts) == 2:
-		return fmt.Sprintf("Found %s and %s", parts[0], parts[1])
		return fmt.Sprintf("Found %s and %s\n", parts[0], parts[1])
	case len(parts) == 1:
-		return fmt.Sprintf("Found %s", parts[0])
		return fmt.Sprintf("Found %s\n", parts[0])
	default:
		// No diagnostics to print.
-		return color.GreenString("Validation OK!")
		return color.GreenString("Validation OK!\n")
	}
}

-func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
func renderSummaryHeaderTemplate(out io.Writer, b *bundle.Bundle) error {
	if b == nil {
-		return renderSummaryTemplate(out, &bundle.Bundle{}, diags)
		return renderSummaryHeaderTemplate(out, &bundle.Bundle{})
	}

	var currentUser = &iam.User{}
@@ -98,20 +118,19 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
		}
	}

-	t := template.Must(template.New("summary").Funcs(renderFuncMap).Parse(summaryTemplate))
	t := template.Must(template.New("summary").Funcs(renderFuncMap).Parse(summaryHeaderTemplate))
	err := t.Execute(out, map[string]any{
		"Name":   b.Config.Bundle.Name,
		"Target": b.Config.Bundle.Target,
		"User":   currentUser.UserName,
		"Path":   b.Config.Workspace.RootPath,
		"Host":   b.Config.Workspace.Host,
-		"Trailer": buildTrailer(diags),
	})
	return err
}
-func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
func renderDiagnosticsOnly(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
	for _, d := range diags {
		for i := range d.Locations {
			if b == nil {
@@ -139,19 +158,73 @@ type RenderOptions struct {
	RenderSummaryTable bool
}

-// RenderTextOutput renders the diagnostics in a human-readable format.
-func RenderTextOutput(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics, opts RenderOptions) error {
-	err := renderDiagnostics(out, b, diags)
// RenderDiagnostics renders the diagnostics in a human-readable format.
func RenderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics, opts RenderOptions) error {
	err := renderDiagnosticsOnly(out, b, diags)
	if err != nil {
		return fmt.Errorf("failed to render diagnostics: %w", err)
	}

	if opts.RenderSummaryTable {
-		err = renderSummaryTemplate(out, b, diags)
-		if err != nil {
-			return fmt.Errorf("failed to render summary: %w", err)
		if b != nil {
			err = renderSummaryHeaderTemplate(out, b)
			if err != nil {
				return fmt.Errorf("failed to render summary: %w", err)
			}
			io.WriteString(out, "\n")
		}
		trailer := buildTrailer(diags)
		io.WriteString(out, trailer)
	}

	return nil
}
func RenderSummary(ctx context.Context, out io.Writer, b *bundle.Bundle) error {
if err := renderSummaryHeaderTemplate(out, b); err != nil {
return err
}
var resourceGroups []ResourceGroup
for _, group := range b.Config.Resources.AllResources() {
resources := make([]ResourceInfo, 0, len(group.Resources))
for key, resource := range group.Resources {
resources = append(resources, ResourceInfo{
Key: key,
Name: resource.GetName(),
URL: resource.GetURL(),
})
}
if len(resources) > 0 {
resourceGroups = append(resourceGroups, ResourceGroup{
GroupName: group.Description.PluralTitle,
Resources: resources,
})
}
}
if err := renderResourcesTemplate(out, resourceGroups); err != nil {
return fmt.Errorf("failed to render resources template: %w", err)
}
return nil
}
// Helper function to sort and render resource groups using the template
func renderResourcesTemplate(out io.Writer, resourceGroups []ResourceGroup) error {
// Sort everything to ensure consistent output
sort.Slice(resourceGroups, func(i, j int) bool {
return resourceGroups[i].GroupName < resourceGroups[j].GroupName
})
for _, group := range resourceGroups {
sort.Slice(group.Resources, func(i, j int) bool {
return group.Resources[i].Key < group.Resources[j].Key
})
}
t := template.Must(template.New("resources").Funcs(renderFuncMap).Parse(resourcesTemplate))
return t.Execute(out, resourceGroups)
}
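A minimal usage sketch of the helper above (function and types from this file; the sample group is made up):

```go
groups := []ResourceGroup{{
	GroupName: "Jobs",
	Resources: []ResourceInfo{
		{Key: "job1", Name: "job1-name", URL: "https://url1"},
	},
}}
// Prints, following the template above:
// Resources:
//   Jobs:
//     job1:
//       Name: job1-name
//       URL: https://url1
_ = renderResourcesTemplate(os.Stdout, groups)
```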


@@ -2,14 +2,21 @@
package render

import (
	"bytes"
	"context"
	"io"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
-	assert "github.com/databricks/cli/libs/dyn/dynassert"
	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/databricks/databricks-sdk-go/service/serving"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
@@ -326,7 +333,7 @@ func TestRenderTextOutput(t *testing.T) {
		t.Run(tc.name, func(t *testing.T) {
			writer := &bytes.Buffer{}
-			err := RenderTextOutput(writer, tc.bundle, tc.diags, tc.opts)
			err := RenderDiagnostics(writer, tc.bundle, tc.diags, tc.opts)
			require.NoError(t, err)
			assert.Equal(t, tc.expected, writer.String())
@@ -468,7 +475,7 @@ func TestRenderDiagnostics(t *testing.T) {
		t.Run(tc.name, func(t *testing.T) {
			writer := &bytes.Buffer{}
-			err := renderDiagnostics(writer, bundle, tc.diags)
			err := renderDiagnosticsOnly(writer, bundle, tc.diags)
			require.NoError(t, err)
			assert.Equal(t, tc.expected, writer.String())
@@ -479,8 +486,105 @@ func TestRenderDiagnostics(t *testing.T) {
func TestRenderSummaryTemplate_nilBundle(t *testing.T) {
	writer := &bytes.Buffer{}
-	err := renderSummaryTemplate(writer, nil, nil)
	err := renderSummaryHeaderTemplate(writer, nil)
	require.NoError(t, err)

	io.WriteString(writer, buildTrailer(nil))
	assert.Equal(t, "Validation OK!\n", writer.String())
}
func TestRenderSummary(t *testing.T) {
ctx := context.Background()
// Create a mock bundle with various resources
b := &bundle.Bundle{
Config: config.Root{
Bundle: config.Bundle{
Name: "test-bundle",
Target: "test-target",
},
Workspace: config.Workspace{
Host: "https://mycompany.databricks.com/",
},
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"job1": {
ID: "1",
URL: "https://url1",
JobSettings: &jobs.JobSettings{Name: "job1-name"},
},
"job2": {
ID: "2",
URL: "https://url2",
JobSettings: &jobs.JobSettings{Name: "job2-name"},
},
},
Pipelines: map[string]*resources.Pipeline{
"pipeline2": {
ID: "4",
// no URL
PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline2-name"},
},
"pipeline1": {
ID: "3",
URL: "https://url3",
PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1-name"},
},
},
Schemas: map[string]*resources.Schema{
"schema1": {
ID: "catalog.schema",
CreateSchema: &catalog.CreateSchema{
Name: "schema",
},
// no URL
},
},
ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
"endpoint1": {
ID: "7",
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "my_serving_endpoint",
},
URL: "https://url4",
},
},
},
},
}
writer := &bytes.Buffer{}
err := RenderSummary(ctx, writer, b)
require.NoError(t, err)
expectedSummary := `Name: test-bundle
Target: test-target
Workspace:
Host: https://mycompany.databricks.com/
Resources:
Jobs:
job1:
Name: job1-name
URL: https://url1
job2:
Name: job2-name
URL: https://url2
Model Serving Endpoints:
endpoint1:
Name: my_serving_endpoint
URL: https://url4
Pipelines:
pipeline1:
Name: pipeline1-name
URL: https://url3
pipeline2:
Name: pipeline2-name
URL: (not deployed)
Schemas:
schema1:
Name: schema
URL: (not deployed)
`
assert.Equal(t, expectedSummary, writer.String())
}


@@ -317,6 +317,29 @@ func (r *jobRunner) Cancel(ctx context.Context) error {
	return errGroup.Wait()
}

func (r *jobRunner) Restart(ctx context.Context, opts *Options) (output.RunOutput, error) {
	// We don't need to cancel existing runs if the job is continuous and unpaused.
	// The /jobs/run-now API will automatically cancel any existing runs before starting a new one.
	//
	// /jobs/run-now will not cancel existing runs if the job is continuous and paused.
	// New job runs will be queued instead and will wait for existing runs to finish.
	// In this case, we need to cancel the existing runs before starting a new one.
	continuous := r.job.JobSettings.Continuous
	if continuous != nil && continuous.PauseStatus == jobs.PauseStatusUnpaused {
		return r.Run(ctx, opts)
	}

	s := cmdio.Spinner(ctx)
	s <- "Cancelling all active job runs"
	err := r.Cancel(ctx)
	close(s)
	if err != nil {
		return nil, err
	}

	return r.Run(ctx, opts)
}

func (r *jobRunner) ParseArgs(args []string, opts *Options) error {
	return r.posArgsHandler().ParseArgs(args, opts)
}
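Condensed from the cmd/bundle/run.go change later in this diff, the call site now routes `--restart` through this method instead of cancelling runs inline:

```go
var output output.RunOutput
if restart {
	output, err = runner.Restart(ctx, &runOptions)
} else {
	output, err = runner.Run(ctx, &runOptions)
}
```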


@@ -1,6 +1,7 @@
package run

import (
	"bytes"
	"context"
	"testing"
	"time"
@@ -8,6 +9,8 @@ import (
	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/flags"
	"github.com/databricks/databricks-sdk-go/experimental/mocks"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/mock"
@@ -126,3 +129,132 @@ func TestJobRunnerCancelWithNoActiveRuns(t *testing.T) {
	err := runner.Cancel(context.Background())
	require.NoError(t, err)
}
func TestJobRunnerRestart(t *testing.T) {
for _, jobSettings := range []*jobs.JobSettings{
{},
{
Continuous: &jobs.Continuous{
PauseStatus: jobs.PauseStatusPaused,
},
},
} {
job := &resources.Job{
ID: "123",
JobSettings: jobSettings,
}
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test_job": job,
},
},
},
}
runner := jobRunner{key: "test", bundle: b, job: job}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
ctx := context.Background()
ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", ""))
ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend))
jobApi := m.GetMockJobsAPI()
jobApi.EXPECT().ListRunsAll(mock.Anything, jobs.ListRunsRequest{
ActiveOnly: true,
JobId: 123,
}).Return([]jobs.BaseRun{
{RunId: 1},
{RunId: 2},
}, nil)
// Mock the runner cancelling existing job runs.
mockWait := &jobs.WaitGetRunJobTerminatedOrSkipped[struct{}]{
Poll: func(time time.Duration, f func(j *jobs.Run)) (*jobs.Run, error) {
return nil, nil
},
}
jobApi.EXPECT().CancelRun(mock.Anything, jobs.CancelRun{
RunId: 1,
}).Return(mockWait, nil)
jobApi.EXPECT().CancelRun(mock.Anything, jobs.CancelRun{
RunId: 2,
}).Return(mockWait, nil)
// Mock the runner triggering a job run
mockWaitForRun := &jobs.WaitGetRunJobTerminatedOrSkipped[jobs.RunNowResponse]{
Poll: func(d time.Duration, f func(*jobs.Run)) (*jobs.Run, error) {
return &jobs.Run{
State: &jobs.RunState{
ResultState: jobs.RunResultStateSuccess,
},
}, nil
},
}
jobApi.EXPECT().RunNow(mock.Anything, jobs.RunNow{
JobId: 123,
}).Return(mockWaitForRun, nil)
// Mock the runner getting the job output
jobApi.EXPECT().GetRun(mock.Anything, jobs.GetRunRequest{}).Return(&jobs.Run{}, nil)
_, err := runner.Restart(ctx, &Options{})
require.NoError(t, err)
}
}
func TestJobRunnerRestartForContinuousUnpausedJobs(t *testing.T) {
job := &resources.Job{
ID: "123",
JobSettings: &jobs.JobSettings{
Continuous: &jobs.Continuous{
PauseStatus: jobs.PauseStatusUnpaused,
},
},
}
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test_job": job,
},
},
},
}
runner := jobRunner{key: "test", bundle: b, job: job}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
ctx := context.Background()
ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "..."))
ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend))
jobApi := m.GetMockJobsAPI()
// The runner should not try and cancel existing job runs for unpaused continuous jobs.
jobApi.AssertNotCalled(t, "ListRunsAll")
jobApi.AssertNotCalled(t, "CancelRun")
// Mock the runner triggering a job run
mockWaitForRun := &jobs.WaitGetRunJobTerminatedOrSkipped[jobs.RunNowResponse]{
Poll: func(d time.Duration, f func(*jobs.Run)) (*jobs.Run, error) {
return &jobs.Run{
State: &jobs.RunState{
ResultState: jobs.RunResultStateSuccess,
},
}, nil
},
}
jobApi.EXPECT().RunNow(mock.Anything, jobs.RunNow{
JobId: 123,
}).Return(mockWaitForRun, nil)
// Mock the runner getting the job output
jobApi.EXPECT().GetRun(mock.Anything, jobs.GetRunRequest{}).Return(&jobs.Run{}, nil)
_, err := runner.Restart(ctx, &Options{})
require.NoError(t, err)
}


@@ -183,6 +183,18 @@ func (r *pipelineRunner) Cancel(ctx context.Context) error {
	return err
}

func (r *pipelineRunner) Restart(ctx context.Context, opts *Options) (output.RunOutput, error) {
	s := cmdio.Spinner(ctx)
	s <- "Cancelling the active pipeline update"
	err := r.Cancel(ctx)
	close(s)
	if err != nil {
		return nil, err
	}

	return r.Run(ctx, opts)
}

func (r *pipelineRunner) ParseArgs(args []string, opts *Options) error {
	if len(args) == 0 {
		return nil


@@ -1,6 +1,7 @@
package run

import (
	"bytes"
	"context"
	"testing"
	"time"
@@ -8,8 +9,12 @@ import (
	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/flags"
	sdk_config "github.com/databricks/databricks-sdk-go/config"
	"github.com/databricks/databricks-sdk-go/experimental/mocks"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)
@@ -47,3 +52,68 @@ func TestPipelineRunnerCancel(t *testing.T) {
	err := runner.Cancel(context.Background())
	require.NoError(t, err)
}
func TestPipelineRunnerRestart(t *testing.T) {
pipeline := &resources.Pipeline{
ID: "123",
}
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"test_pipeline": pipeline,
},
},
},
}
runner := pipelineRunner{key: "test", bundle: b, pipeline: pipeline}
m := mocks.NewMockWorkspaceClient(t)
m.WorkspaceClient.Config = &sdk_config.Config{
Host: "https://test.com",
}
b.SetWorkpaceClient(m.WorkspaceClient)
ctx := context.Background()
ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "..."))
ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend))
mockWait := &pipelines.WaitGetPipelineIdle[struct{}]{
Poll: func(time.Duration, func(*pipelines.GetPipelineResponse)) (*pipelines.GetPipelineResponse, error) {
return nil, nil
},
}
pipelineApi := m.GetMockPipelinesAPI()
pipelineApi.EXPECT().Stop(mock.Anything, pipelines.StopRequest{
PipelineId: "123",
}).Return(mockWait, nil)
pipelineApi.EXPECT().GetByPipelineId(mock.Anything, "123").Return(&pipelines.GetPipelineResponse{}, nil)
// Mock runner starting a new update
pipelineApi.EXPECT().StartUpdate(mock.Anything, pipelines.StartUpdate{
PipelineId: "123",
}).Return(&pipelines.StartUpdateResponse{
UpdateId: "456",
}, nil)
// Mock runner polling for events
pipelineApi.EXPECT().ListPipelineEventsAll(mock.Anything, pipelines.ListPipelineEventsRequest{
Filter: `update_id = '456'`,
MaxResults: 100,
PipelineId: "123",
}).Return([]pipelines.PipelineEvent{}, nil)
// Mock runner polling for update status
pipelineApi.EXPECT().GetUpdateByPipelineIdAndUpdateId(mock.Anything, "123", "456").
Return(&pipelines.GetUpdateResponse{
Update: &pipelines.UpdateInfo{
State: pipelines.UpdateInfoStateCompleted,
},
}, nil)
_, err := runner.Restart(ctx, &Options{})
require.NoError(t, err)
}


@@ -27,6 +27,10 @@ type Runner interface {
	// Run the underlying workflow.
	Run(ctx context.Context, opts *Options) (output.RunOutput, error)

	// Restart the underlying workflow by cancelling any existing runs before
	// starting a new one.
	Restart(ctx context.Context, opts *Options) (output.RunOutput, error)

	// Cancel the underlying workflow.
	Cancel(ctx context.Context) error


@@ -209,6 +209,10 @@
{
  "type": "object",
  "properties": {
    "budget_policy_id": {
      "description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload.",
      "$ref": "#/$defs/string"
    },
    "continuous": {
      "description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.",
      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous"
@@ -3901,6 +3905,10 @@
{
  "type": "object",
  "properties": {
    "report": {
      "description": "Select tables from a specific source report.",
      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec"
    },
    "schema": {
      "description": "Select tables from a specific source schema.",
      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec"
@@ -4233,6 +4241,40 @@
      }
    ]
  },
  "pipelines.ReportSpec": {
    "anyOf": [
      {
        "type": "object",
        "properties": {
          "destination_catalog": {
            "description": "Required. Destination catalog to store table.",
            "$ref": "#/$defs/string"
          },
          "destination_schema": {
            "description": "Required. Destination schema to store table.",
            "$ref": "#/$defs/string"
          },
          "destination_table": {
            "description": "Required. Destination table name. The pipeline fails if a table with that name already exists.",
            "$ref": "#/$defs/string"
          },
          "source_url": {
            "description": "Required. Report URL in the source system.",
            "$ref": "#/$defs/string"
          },
          "table_configuration": {
            "description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object.",
            "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig"
          }
        },
        "additionalProperties": false
      },
      {
        "type": "string",
        "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
      }
    ]
  },
  "pipelines.SchemaSpec": {
    "anyOf": [
      {
@@ -4281,7 +4323,7 @@
          "$ref": "#/$defs/string"
        },
        "destination_table": {
-         "description": "Optional. Destination table name. The pipeline fails If a table with that name already exists. If not set, the source table name is used.",
          "description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.",
          "$ref": "#/$defs/string"
        },
        "source_catalog": {
@@ -4329,6 +4371,10 @@
            "SCD_TYPE_1",
            "SCD_TYPE_2"
          ]
-       }
        },
        "sequence_by": {
          "description": "The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.",
          "$ref": "#/$defs/slice/string"
        }
      },
      "additionalProperties": false


@@ -0,0 +1,33 @@
bundle:
name: issue_1828
variables:
# One entry for each of the underlying YAML (or [dyn.Kind]) types.
# The test confirms we can convert to and from the typed configuration without losing information.
map:
default:
foo: bar
sequence:
default:
- foo
- bar
string:
default: foo
bool:
default: true
int:
default: 42
float:
default: 3.14
time:
default: 2021-01-01
nil:
default:


@@ -0,0 +1,48 @@
package config_tests
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestIssue1828(t *testing.T) {
b := load(t, "./issue_1828")
if assert.Contains(t, b.Config.Variables, "map") {
assert.Equal(t, map[string]any{
"foo": "bar",
}, b.Config.Variables["map"].Default)
}
if assert.Contains(t, b.Config.Variables, "sequence") {
assert.Equal(t, []any{
"foo",
"bar",
}, b.Config.Variables["sequence"].Default)
}
if assert.Contains(t, b.Config.Variables, "string") {
assert.Equal(t, "foo", b.Config.Variables["string"].Default)
}
if assert.Contains(t, b.Config.Variables, "bool") {
assert.Equal(t, true, b.Config.Variables["bool"].Default)
}
if assert.Contains(t, b.Config.Variables, "int") {
assert.Equal(t, 42, b.Config.Variables["int"].Default)
}
if assert.Contains(t, b.Config.Variables, "float") {
assert.Equal(t, 3.14, b.Config.Variables["float"].Default)
}
if assert.Contains(t, b.Config.Variables, "time") {
assert.Equal(t, "2021-01-01", b.Config.Variables["time"].Default)
}
if assert.Contains(t, b.Config.Variables, "nil") {
assert.Equal(t, nil, b.Config.Variables["nil"].Default)
}
}


@@ -78,7 +78,7 @@ func newDeployCommand() *cobra.Command {
			}

			renderOpts := render.RenderOptions{RenderSummaryTable: false}
-			err := render.RenderTextOutput(cmd.OutOrStdout(), b, diags, renderOpts)
			err := render.RenderDiagnostics(cmd.OutOrStdout(), b, diags, renderOpts)
			if err != nil {
				return fmt.Errorf("failed to render output: %w", err)
			}


@@ -8,6 +8,7 @@ import (
	"github.com/databricks/cli/bundle/deploy/terraform"
	"github.com/databricks/cli/bundle/phases"
	"github.com/databricks/cli/bundle/run"
	"github.com/databricks/cli/bundle/run/output"
	"github.com/databricks/cli/cmd/bundle/utils"
	"github.com/databricks/cli/cmd/root"
	"github.com/databricks/cli/libs/cmdio"
@@ -100,19 +101,16 @@ task or a Python wheel task, the second example applies.
			}
			runOptions.NoWait = noWait

			var output output.RunOutput
			if restart {
-				s := cmdio.Spinner(ctx)
-				s <- "Cancelling all runs"
-				err := runner.Cancel(ctx)
-				close(s)
-				if err != nil {
-					return err
-				}
				output, err = runner.Restart(ctx, &runOptions)
			} else {
				output, err = runner.Run(ctx, &runOptions)
			}
-			output, err := runner.Run(ctx, &runOptions)
			if err != nil {
				return err
			}

			if output != nil {
				switch root.OutputType(cmd) {
				case flags.OutputText:


@@ -8,8 +8,10 @@ import (
	"path/filepath"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/deploy/terraform"
	"github.com/databricks/cli/bundle/phases"
	"github.com/databricks/cli/bundle/render"
	"github.com/databricks/cli/cmd/bundle/utils"
	"github.com/databricks/cli/cmd/root"
	"github.com/databricks/cli/libs/flags"
@@ -19,11 +21,8 @@ import (
func newSummaryCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "summary",
-		Short: "Describe the bundle resources and their deployment states",
		Short: "Summarize resources deployed by this bundle",
		Args:  root.NoArgs,
-
-		// This command is currently intended for the Databricks VSCode extension only
-		Hidden: true,
	}

	var forcePull bool
@@ -60,14 +59,15 @@ func newSummaryCommand() *cobra.Command {
		}
	}

-	diags = bundle.Apply(ctx, b, terraform.Load())
	diags = bundle.Apply(ctx, b,
		bundle.Seq(terraform.Load(), mutator.InitializeURLs()))
	if err := diags.Error(); err != nil {
		return err
	}

	switch root.OutputType(cmd) {
	case flags.OutputText:
-		return fmt.Errorf("%w, only json output is supported", errors.ErrUnsupported)
		return render.RenderSummary(ctx, cmd.OutOrStdout(), b)
	case flags.OutputJSON:
		buf, err := json.MarshalIndent(b.Config, "", "  ")
		if err != nil {


@@ -54,7 +54,7 @@ func newValidateCommand() *cobra.Command {
	switch root.OutputType(cmd) {
	case flags.OutputText:
		renderOpts := render.RenderOptions{RenderSummaryTable: true}
-		err := render.RenderTextOutput(cmd.OutOrStdout(), b, diags, renderOpts)
		err := render.RenderDiagnostics(cmd.OutOrStdout(), b, diags, renderOpts)
		if err != nil {
			return fmt.Errorf("failed to render output: %w", err)
		}


@@ -28,9 +28,6 @@ func New() *cobra.Command {
		Annotations: map[string]string{
			"package": "apps",
		},
-
-		// This service is being previewed; hide from help output.
-		Hidden: true,
	}

	// Add methods


@@ -0,0 +1,220 @@
// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
package disable_legacy_dbfs
import (
"fmt"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/databricks-sdk-go/service/settings"
"github.com/spf13/cobra"
)
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var cmdOverrides []func(*cobra.Command)
func New() *cobra.Command {
cmd := &cobra.Command{
Use: "disable-legacy-dbfs",
Short: `When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new mounts).`,
Long: `When this setting is on, access to DBFS root and DBFS mounts is disallowed (as
well as creation of new mounts). When the setting is off, all DBFS
functionality is enabled`,
// This service is being previewed; hide from help output.
Hidden: true,
}
// Add methods
cmd.AddCommand(newDelete())
cmd.AddCommand(newGet())
cmd.AddCommand(newUpdate())
// Apply optional overrides to this command.
for _, fn := range cmdOverrides {
fn(cmd)
}
return cmd
}
// start delete command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
*settings.DeleteDisableLegacyDbfsRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
var deleteReq settings.DeleteDisableLegacyDbfsRequest
// TODO: short flags
cmd.Flags().StringVar(&deleteReq.Etag, "etag", deleteReq.Etag, `etag used for versioning.`)
cmd.Use = "delete"
cmd.Short = `Delete the disable legacy DBFS setting.`
cmd.Long = `Delete the disable legacy DBFS setting.
Deletes the disable legacy DBFS setting for a workspace, reverting back to the
default.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyDbfs().Delete(ctx, deleteReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range deleteOverrides {
fn(cmd, &deleteReq)
}
return cmd
}
// start get command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
*settings.GetDisableLegacyDbfsRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
var getReq settings.GetDisableLegacyDbfsRequest
// TODO: short flags
cmd.Flags().StringVar(&getReq.Etag, "etag", getReq.Etag, `etag used for versioning.`)
cmd.Use = "get"
cmd.Short = `Get the disable legacy DBFS setting.`
cmd.Long = `Get the disable legacy DBFS setting.
Gets the disable legacy DBFS setting.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
check := root.ExactArgs(0)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
response, err := w.Settings.DisableLegacyDbfs().Get(ctx, getReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range getOverrides {
fn(cmd, &getReq)
}
return cmd
}
// start update command
// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
*settings.UpdateDisableLegacyDbfsRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
var updateReq settings.UpdateDisableLegacyDbfsRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Use = "update"
cmd.Short = `Update the disable legacy DBFS setting.`
cmd.Long = `Update the disable legacy DBFS setting.
Updates the disable legacy DBFS setting for the workspace.`
cmd.Annotations = make(map[string]string)
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
w := root.WorkspaceClient(ctx)
if cmd.Flags().Changed("json") {
diags := updateJson.Unmarshal(&updateReq)
if diags.HasError() {
return diags.Error()
}
if len(diags) > 0 {
err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
if err != nil {
return err
}
}
} else {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}
response, err := w.Settings.DisableLegacyDbfs().Update(ctx, updateReq)
if err != nil {
return err
}
return cmdio.Render(ctx, response)
}
// Disable completions since they are not applicable.
// Can be overridden by manual implementation in `override.go`.
cmd.ValidArgsFunction = cobra.NoFileCompletions
// Apply optional overrides to this command.
for _, fn := range updateOverrides {
fn(cmd, &updateReq)
}
return cmd
}
// end service DisableLegacyDbfs
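The generated command is a thin wrapper over the SDK; a hedged sketch of reading the same setting directly, using the same calls the command invokes above (assuming `w` is a `*databricks.WorkspaceClient` and `ctx` a `context.Context`):

```go
// Mirrors the generated `get` subcommand above.
setting, err := w.Settings.DisableLegacyDbfs().Get(ctx, settings.GetDisableLegacyDbfsRequest{})
if err != nil {
	return err
}
// Render the response the same way the command does.
return cmdio.Render(ctx, setting)
```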


@@ -1557,6 +1557,7 @@ func newSubmit() *cobra.Command {
	cmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`)
	// TODO: array: access_control_list
	cmd.Flags().StringVar(&submitReq.BudgetPolicyId, "budget-policy-id", submitReq.BudgetPolicyId, `The user specified id of the budget policy to use for this one-time run.`)
	// TODO: complex arg: email_notifications
	// TODO: array: environments
	// TODO: complex arg: git_source


@@ -9,6 +9,7 @@ import (
	compliance_security_profile "github.com/databricks/cli/cmd/workspace/compliance-security-profile"
	default_namespace "github.com/databricks/cli/cmd/workspace/default-namespace"
	disable_legacy_access "github.com/databricks/cli/cmd/workspace/disable-legacy-access"
	disable_legacy_dbfs "github.com/databricks/cli/cmd/workspace/disable-legacy-dbfs"
	enhanced_security_monitoring "github.com/databricks/cli/cmd/workspace/enhanced-security-monitoring"
	restrict_workspace_admins "github.com/databricks/cli/cmd/workspace/restrict-workspace-admins"
)
@@ -33,6 +34,7 @@ func New() *cobra.Command {
	cmd.AddCommand(compliance_security_profile.New())
	cmd.AddCommand(default_namespace.New())
	cmd.AddCommand(disable_legacy_access.New())
	cmd.AddCommand(disable_legacy_dbfs.New())
	cmd.AddCommand(enhanced_security_monitoring.New())
	cmd.AddCommand(restrict_workspace_admins.New())

go.mod

@@ -7,7 +7,7 @@ toolchain go1.22.7

require (
	github.com/Masterminds/semver/v3 v3.3.0 // MIT
	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.48.0 // Apache 2.0
	github.com/databricks/databricks-sdk-go v0.49.0 // Apache 2.0
	github.com/fatih/color v1.17.0 // MIT
	github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
	github.com/google/uuid v1.6.0 // BSD-3-Clause

go.sum (generated)

@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.48.0 h1:46KtsnRo+FGhC3izUXbpL0PXBNomvsdignYDhJZlm9s=
-github.com/databricks/databricks-sdk-go v0.48.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/databricks/databricks-sdk-go v0.49.0 h1:VBTeZZMLIuBSM4kxOCfUcW9z4FUQZY2QeNRD5qm9FUQ=
github.com/databricks/databricks-sdk-go v0.49.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=


@@ -2,11 +2,14 @@ package acc

import (
	"context"
	"fmt"
	"os"
	"testing"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/apierr"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/workspace"
	"github.com/stretchr/testify/require"
)
@@ -94,3 +97,30 @@ func (t *WorkspaceT) RunPython(code string) (string, error) {
	require.True(t, ok, "unexpected type %T", results.Data)
	return output, nil
}
func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string {
ctx := context.Background()
me, err := t.W.CurrentUser.Me(ctx)
require.NoError(t, err)
basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName(name...))
t.Logf("Creating %s", basePath)
err = t.W.Workspace.MkdirsByPath(ctx, basePath)
require.NoError(t, err)
// Remove test directory on test completion.
t.Cleanup(func() {
t.Logf("Removing %s", basePath)
err := t.W.Workspace.Delete(ctx, workspace.Delete{
Path: basePath,
Recursive: true,
})
if err == nil || apierr.IsMissing(err) {
return
}
t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err)
})
return basePath
}


@@ -0,0 +1,13 @@
# Bugbash
The script in this directory can be used to conveniently exec into a shell
where a CLI build for a specific branch is made available.
## Usage
This script warns if you do NOT have at least Bash 5 installed; it still
works with earlier versions, but without command completion.
```shell
bash <(curl -fsSL https://raw.githubusercontent.com/databricks/cli/main/internal/bugbash/exec.sh) my-branch
```

internal/bugbash/exec.sh (executable)

@@ -0,0 +1,139 @@
#!/usr/bin/env bash
set -euo pipefail
# Set the GitHub repository for the Databricks CLI.
export GH_REPO="databricks/cli"
# Synthesize the directory name for the snapshot build.
function cli_snapshot_directory() {
dir="cli"
# Append OS
case "$(uname -s)" in
Linux)
dir="${dir}_linux"
;;
Darwin)
dir="${dir}_darwin"
;;
*)
echo "Unknown operating system: $os"
;;
esac
# Append architecture
case "$(uname -m)" in
x86_64)
dir="${dir}_amd64_v1"
;;
i386|i686)
dir="${dir}_386"
;;
arm64|aarch64)
dir="${dir}_arm64"
;;
armv7l|armv8l)
dir="${dir}_arm_6"
;;
*)
echo "Unknown architecture: $arch"
;;
esac
echo "$dir"
}
BRANCH=${1:-}
shift || true

# Default to the main branch if no branch is specified.
if [ -z "$BRANCH" ]; then
  BRANCH=main
fi
# Check if the "gh" command is available.
if ! command -v gh &> /dev/null; then
echo "The GitHub CLI (gh) is required to download the snapshot build."
echo "Install and configure it with:"
echo ""
echo " brew install gh"
echo " gh auth login"
echo ""
exit 1
fi
echo "Looking for a snapshot build of the Databricks CLI on branch $BRANCH..."
# Find last successful build on $BRANCH.
last_successful_run_id=$(
gh run list -b "$BRANCH" -w release-snapshot --json 'databaseId,conclusion' |
jq 'limit(1; .[] | select(.conclusion == "success")) | .databaseId'
)
if [ -z "$last_successful_run_id" ]; then
echo "Unable to find last successful build of the release-snapshot workflow for branch $BRANCH."
exit 1
fi
# Determine artifact name with the right binaries for this runner.
case "$(uname -s)" in
Linux)
artifact="cli_linux_snapshot"
;;
Darwin)
artifact="cli_darwin_snapshot"
;;
esac
# Create a temporary directory to download the artifact.
dir=$(mktemp -d)
# Download the artifact.
echo "Downloading the snapshot build..."
gh run download "$last_successful_run_id" -n "$artifact" -D "$dir/.bin"
dir="$dir/.bin/$(cli_snapshot_directory)"
if [ ! -d "$dir" ]; then
echo "Directory does not exist: $dir"
exit 1
fi
# Make CLI available on $PATH.
chmod +x "$dir/databricks"
export PATH="$dir:$PATH"
# Set the prompt to indicate the bugbash environment and exec.
export PS1="(bugbash $BRANCH) \[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ "
# Display completion instructions.
echo ""
echo "=================================================================="
if [[ ${BASH_VERSINFO[0]} -lt 5 ]]; then
echo -en "\033[31m"
echo "You have Bash version < 5 installed... completion won't work."
echo -en "\033[0m"
echo ""
echo "Install it with:"
echo ""
echo " brew install bash bash-completion"
echo ""
echo "=================================================================="
fi
echo ""
echo "To load completions in your current shell session:"
echo ""
echo " source /opt/homebrew/etc/profile.d/bash_completion.sh"
echo " source <(databricks completion bash)"
echo ""
echo "=================================================================="
echo ""
# Exec into a new shell.
# Note: don't use zsh because on macOS it _always_ overwrites PS1.
exec /usr/bin/env bash


@@ -0,0 +1,110 @@
package internal
import (
"encoding/base64"
"testing"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/databricks-sdk-go/apierr"
"github.com/databricks/databricks-sdk-go/service/dashboards"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Verify that importing a dashboard through the Workspace API retains the identity of the underlying resource,
// as well as properties exclusively accessible through the dashboards API.
func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
t.Parallel()
dashboardName := "New Dashboard"
dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`)
warehouseId := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-")
dashboard, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{
DisplayName: dashboardName,
ParentPath: dir,
SerializedDashboard: string(dashboardPayload),
WarehouseId: warehouseId,
})
require.NoError(t, err)
t.Logf("Dashboard ID (per Lakeview API): %s", dashboard.DashboardId)
// Overwrite the dashboard via the workspace API.
{
err := wt.W.Workspace.Import(ctx, workspace.Import{
Format: workspace.ImportFormatAuto,
Path: dashboard.Path,
Content: base64.StdEncoding.EncodeToString(dashboardPayload),
Overwrite: true,
})
require.NoError(t, err)
}
// Cross-check consistency with the workspace object.
{
obj, err := wt.W.Workspace.GetStatusByPath(ctx, dashboard.Path)
require.NoError(t, err)
// Confirm that the resource ID included in the response is equal to the dashboard ID.
require.Equal(t, dashboard.DashboardId, obj.ResourceId)
t.Logf("Dashboard ID (per workspace object status): %s", obj.ResourceId)
}
// Try to overwrite the dashboard via the Lakeview API (and expect failure).
{
_, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{
DisplayName: dashboardName,
ParentPath: dir,
SerializedDashboard: string(dashboardPayload),
})
require.ErrorIs(t, err, apierr.ErrResourceAlreadyExists)
}
// Retrieve the dashboard object and confirm that only select fields were updated by the import.
{
previousDashboard := dashboard
currentDashboard, err := wt.W.Lakeview.Get(ctx, dashboards.GetDashboardRequest{
DashboardId: dashboard.DashboardId,
})
require.NoError(t, err)
// Convert the dashboard object to a [dyn.Value] to make comparison easier.
previous, err := convert.FromTyped(previousDashboard, dyn.NilValue)
require.NoError(t, err)
current, err := convert.FromTyped(currentDashboard, dyn.NilValue)
require.NoError(t, err)
// Collect updated paths.
var updatedFieldPaths []string
_, err = merge.Override(previous, current, merge.OverrideVisitor{
VisitDelete: func(basePath dyn.Path, left dyn.Value) error {
assert.Fail(t, "unexpected delete operation")
return nil
},
VisitInsert: func(basePath dyn.Path, right dyn.Value) (dyn.Value, error) {
assert.Fail(t, "unexpected insert operation")
return right, nil
},
VisitUpdate: func(basePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
updatedFieldPaths = append(updatedFieldPaths, basePath.String())
return right, nil
},
})
require.NoError(t, err)
// Confirm that only the expected fields have been updated.
assert.ElementsMatch(t, []string{
"etag",
"update_time",
}, updatedFieldPaths)
}
}

View File

@ -112,8 +112,8 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
		// assert mkdir fails
		_, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello"))

-		// Different cloud providers return different errors.
-		regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`)
+		// Different cloud providers or cloud configurations return different errors.
+		regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`)
		assert.Regexp(t, regex, err.Error())
	})
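Note: the error shapes accepted above are easy to sanity-check in isolation. Below is a minimal standalone sketch (the sample messages are hypothetical, invented for illustration rather than captured from a real cloud) that runs one message per accepted shape through the broadened pattern:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the test, including the new "specified path already exists" clause.
	regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`)

	// Hypothetical sample messages, one per error shape the regex accepts.
	samples := []string{
		`Path is a file: /tmp/hello`,
		`mkdir failed: Cannot create directory /tmp/hello because /tmp/hello is an existing file.`,
		`mkdirs(hadoopPath: /tmp/hello, permission: rwxrwxrwx): failed`,
		`mkdir failed: "The specified path already exists."`,
	}
	for _, s := range samples {
		// All four should print "true".
		fmt.Printf("%v <- %s\n", regex.MatchString(s), s)
	}
}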

View File

@ -20,6 +20,7 @@ import (
"time" "time"
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/internal/acc"
"github.com/databricks/cli/libs/flags" "github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/cmd" "github.com/databricks/cli/cmd"
@ -591,13 +592,10 @@ func setupWsfsExtensionsFiler(t *testing.T) (filer.Filer, string) {
} }
func setupDbfsFiler(t *testing.T) (filer.Filer, string) { func setupDbfsFiler(t *testing.T) (filer.Filer, string) {
t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, wt := acc.WorkspaceTest(t)
w, err := databricks.NewWorkspaceClient() tmpDir := TemporaryDbfsDir(t, wt.W)
require.NoError(t, err) f, err := filer.NewDbfsClient(wt.W, tmpDir)
tmpDir := TemporaryDbfsDir(t, w)
f, err := filer.NewDbfsClient(w, tmpDir)
require.NoError(t, err) require.NoError(t, err)
return f, path.Join("dbfs:/", tmpDir) return f, path.Join("dbfs:/", tmpDir)

View File

@ -398,6 +398,34 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d
	return dyn.NewValue(out, src.Locations()), diags
}

-func (n normalizeOptions) normalizeInterface(typ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) {
+func (n normalizeOptions) normalizeInterface(_ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) {
+	// Deal with every [dyn.Kind] here to ensure completeness.
+	switch src.Kind() {
+	case dyn.KindMap:
+		// Fall through
+	case dyn.KindSequence:
+		// Fall through
+	case dyn.KindString:
+		// Fall through
+	case dyn.KindBool:
+		// Fall through
+	case dyn.KindInt:
+		// Fall through
+	case dyn.KindFloat:
+		// Fall through
+	case dyn.KindTime:
+		// Conversion of a time value to an interface{}.
+		// The [dyn.Value.AsAny] equivalent for this kind is the [time.Time] struct.
+		// If we convert to a typed representation and back again, we cannot distinguish
+		// a [time.Time] struct from any other struct.
+		//
+		// Therefore, we normalize the time value to a string.
+		return dyn.NewValue(src.MustTime().String(), src.Locations()), nil
+	case dyn.KindNil:
+		// Fall through
+	default:
+		return dyn.InvalidValue, diag.Errorf("unsupported kind: %s", src.Kind())
+	}
+
	return src, nil
}

View File

@ -858,23 +858,7 @@ func TestNormalizeAnchors(t *testing.T) {
	}, vout.AsAny())
}

-func TestNormalizeBoolToAny(t *testing.T) {
-	var typ any
-	vin := dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}})
-	vout, err := Normalize(&typ, vin)
-	assert.Len(t, err, 0)
-	assert.Equal(t, dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
-}
-
-func TestNormalizeIntToAny(t *testing.T) {
-	var typ any
-	vin := dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}})
-	vout, err := Normalize(&typ, vin)
-	assert.Len(t, err, 0)
-	assert.Equal(t, dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
-}
-
-func TestNormalizeSliceToAny(t *testing.T) {
+func TestNormalizeAnyFromSlice(t *testing.T) {
	var typ any
	v1 := dyn.NewValue(1, []dyn.Location{{File: "file", Line: 1, Column: 1}})
	v2 := dyn.NewValue(2, []dyn.Location{{File: "file", Line: 1, Column: 1}})
@ -883,3 +867,35 @@ func TestNormalizeSliceToAny(t *testing.T) {
	assert.Len(t, err, 0)
	assert.Equal(t, dyn.NewValue([]dyn.Value{v1, v2}, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
}
+
+func TestNormalizeAnyFromString(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue("string", []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue("string", []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromBool(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(false, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromInt(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(10, []dyn.Location{{File: "file", Line: 1, Column: 1}}), vout)
+}
+
+func TestNormalizeAnyFromTime(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(dyn.MustTime("2024-08-29"), []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, dyn.NewValue("2024-08-29", vin.Locations()), vout)
+}

View File

@ -0,0 +1,60 @@
package dynassert
import (
"fmt"
"strings"
"github.com/databricks/cli/libs/dyn"
)
// Dump returns the Go code to recreate the given value.
func Dump(v dyn.Value) string {
var sb strings.Builder
dump(v, &sb)
return sb.String()
}
func dump(v dyn.Value, sb *strings.Builder) {
sb.WriteString("dyn.NewValue(\n")
switch v.Kind() {
case dyn.KindMap:
sb.WriteString("map[string]dyn.Value{")
m := v.MustMap()
for _, p := range m.Pairs() {
sb.WriteString(fmt.Sprintf("\n%q: ", p.Key.MustString()))
dump(p.Value, sb)
sb.WriteByte(',')
}
sb.WriteString("\n},\n")
case dyn.KindSequence:
sb.WriteString("[]dyn.Value{\n")
for _, e := range v.MustSequence() {
dump(e, sb)
sb.WriteByte(',')
}
sb.WriteString("},\n")
case dyn.KindString:
sb.WriteString(fmt.Sprintf("%q,\n", v.MustString()))
case dyn.KindBool:
sb.WriteString(fmt.Sprintf("%t,\n", v.MustBool()))
case dyn.KindInt:
sb.WriteString(fmt.Sprintf("%d,\n", v.MustInt()))
case dyn.KindFloat:
sb.WriteString(fmt.Sprintf("%f,\n", v.MustFloat()))
case dyn.KindTime:
sb.WriteString(fmt.Sprintf("dyn.NewTime(%q),\n", v.MustTime().String()))
case dyn.KindNil:
sb.WriteString("nil,\n")
default:
panic(fmt.Sprintf("unhandled kind: %v", v.Kind()))
}
// Add location
sb.WriteString("[]dyn.Location{")
for _, l := range v.Locations() {
sb.WriteString(fmt.Sprintf("{File: %q, Line: %d, Column: %d},", l.File, l.Line, l.Column))
}
sb.WriteString("},\n")
sb.WriteString(")")
}
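A usage sketch (assumed, not part of this change): Dump is convenient for generating expected-value literals when writing tests against dyn values, e.g.:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynassert"
)

func main() {
	// Build a small value with locations, then print the Go code that
	// recreates it, ready to paste into a test as an expected literal.
	// The file name and positions here are made up for illustration.
	v := dyn.NewValue(map[string]dyn.Value{
		"hr": dyn.NewValue(65, []dyn.Location{{File: "stats.yml", Line: 2, Column: 5}}),
	}, []dyn.Location{{File: "stats.yml", Line: 1, Column: 1}})
	fmt.Println(dynassert.Dump(v))
}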

View File

@ -105,6 +105,9 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro
	switch st {
	case "!!str":
		// OK
+	case "!!null":
+		// A literal unquoted "null" is treated as a null value by the YAML parser.
+		// However, when used as a key, it is treated as the string "null".
	case "!!merge":
		if merge != nil {
			panic("merge node already set")
@ -115,10 +118,11 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro
			return dyn.InvalidValue, errorf(loc, "invalid key tag: %v", st)
		}

-		k, err := d.load(key)
-		if err != nil {
-			return dyn.InvalidValue, err
-		}
+		k := dyn.NewValue(key.Value, []dyn.Location{{
+			File:   d.path,
+			Line:   key.Line,
+			Column: key.Column,
+		}})

		v, err := d.load(val)
		if err != nil {
@ -173,6 +177,14 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro
	return dyn.NewValue(out, []dyn.Location{loc}), nil
}

+func newIntValue(i64 int64, loc dyn.Location) dyn.Value {
+	// Use regular int type instead of int64 if possible.
+	if i64 >= math.MinInt32 && i64 <= math.MaxInt32 {
+		return dyn.NewValue(int(i64), []dyn.Location{loc})
+	}
+	return dyn.NewValue(i64, []dyn.Location{loc})
+}
+
func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error) {
	st := node.ShortTag()
	switch st {
@ -188,18 +200,44 @@ func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error
			return dyn.InvalidValue, errorf(loc, "invalid bool value: %v", node.Value)
		}
	case "!!int":
-		i64, err := strconv.ParseInt(node.Value, 10, 64)
-		if err != nil {
-			return dyn.InvalidValue, errorf(loc, "invalid int value: %v", node.Value)
+		// Try to parse the integer value in base 10.
+		// We trim leading zeros to avoid octal parsing of the "0" prefix.
+		// See "testdata/spec_example_2.19.yml" for background.
+		i64, err := strconv.ParseInt(strings.TrimLeft(node.Value, "0"), 10, 64)
+		if err == nil {
+			return newIntValue(i64, loc), nil
		}

-		// Use regular int type instead of int64 if possible.
-		if i64 >= math.MinInt32 && i64 <= math.MaxInt32 {
-			return dyn.NewValue(int(i64), []dyn.Location{loc}), nil
+		// Let the [ParseInt] function figure out the base.
+		i64, err = strconv.ParseInt(node.Value, 0, 64)
+		if err == nil {
+			return newIntValue(i64, loc), nil
		}

-		return dyn.NewValue(i64, []dyn.Location{loc}), nil
+		return dyn.InvalidValue, errorf(loc, "invalid int value: %v", node.Value)
	case "!!float":
		f64, err := strconv.ParseFloat(node.Value, 64)
		if err != nil {
+			// Deal with infinity prefixes.
+			v := strings.ToLower(node.Value)
+			switch {
+			case strings.HasPrefix(v, "+"):
+				v = strings.TrimPrefix(v, "+")
+				f64 = math.Inf(1)
+			case strings.HasPrefix(v, "-"):
+				v = strings.TrimPrefix(v, "-")
+				f64 = math.Inf(-1)
+			default:
+				// No prefix.
+				f64 = math.Inf(1)
+			}
+
+			// Deal with infinity and NaN values.
+			switch v {
+			case ".inf":
+				return dyn.NewValue(f64, []dyn.Location{loc}), nil
+			case ".nan":
+				return dyn.NewValue(math.NaN(), []dyn.Location{loc}), nil
+			}
+
			return dyn.InvalidValue, errorf(loc, "invalid float value: %v", node.Value)
		}
		return dyn.NewValue(f64, []dyn.Location{loc}), nil
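The two-step integer parse above matters because strconv.ParseInt with base 0 follows C-style prefix rules, where a bare leading zero means octal, while YAML 1.2 reads "012345" as decimal. A standalone sketch of the difference (stdlib only):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// YAML 1.2 semantics: "012345" is decimal 12345, "0o14" is octal 12, "0xC" is hex 12.
	// Base 0 alone would misread "012345" as octal 5349 (leading-zero rule),
	// hence the trim-then-base-10 attempt before the base-0 fallback.
	for _, s := range []string{"012345", "0o14", "0xC"} {
		base10, err10 := strconv.ParseInt(strings.TrimLeft(s, "0"), 10, 64)
		base0, err0 := strconv.ParseInt(s, 0, 64)
		fmt.Printf("%-8s base10(trimmed)=%v (err=%v)  base0=%v (err=%v)\n", s, base10, err10, base0, err0)
	}
}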

View File

@ -0,0 +1,5 @@
# Example 2.1 Sequence of Scalars (ball players)
- Mark McGwire
- Sammy Sosa
- Ken Griffey

View File

@ -0,0 +1,10 @@
# Example 2.10 Node for “Sammy Sosa” appears twice in this document
---
hr:
- Mark McGwire
# Following node labeled SS
- &SS Sammy Sosa
rbi:
- *SS # Subsequent occurrence
- Ken Griffey

View File

@ -0,0 +1,10 @@
# Example 2.11 Mapping between Sequences
? - Detroit Tigers
- Chicago cubs
: - 2001-07-23
? [ New York Yankees,
Atlanta Braves ]
: [ 2001-07-02, 2001-08-12,
2001-08-14 ]

View File

@ -0,0 +1,10 @@
# Example 2.12 Compact Nested Mapping
---
# Products purchased
- item : Super Hoop
quantity: 1
- item : Basketball
quantity: 4
- item : Big Shoes
quantity: 1

View File

@ -0,0 +1,6 @@
# Example 2.13 In literals, newlines are preserved
# ASCII Art
--- |
\//||\/||
// || ||__

View File

@ -0,0 +1,6 @@
# Example 2.14 In the folded scalars, newlines become spaces
--- >
Mark McGwire's
year was crippled
by a knee injury.

View File

@ -0,0 +1,10 @@
# Example 2.15 Folded newlines are preserved for “more indented” and blank lines
--- >
Sammy Sosa completed another
fine season with great stats.
63 Home Runs
0.288 Batting Average
What a year!

View File

@ -0,0 +1,9 @@
# Example 2.16 Indentation determines scope
name: Mark McGwire
accomplishment: >
Mark set a major league
home run record in 1998.
stats: |
65 Home Runs
0.278 Batting Average

View File

@ -0,0 +1,9 @@
# Example 2.17 Quoted Scalars
unicode: "Sosa did fine.\u263A"
control: "\b1998\t1999\t2000\n"
hex esc: "\x0d\x0a is \r\n"
single: '"Howdy!" he cried.'
quoted: ' # Not a ''comment''.'
tie-fighter: '|\-*-/|'

View File

@ -0,0 +1,8 @@
# Example 2.18 Multi-line Flow Scalars
plain:
This unquoted scalar
spans many lines.
quoted: "So does this
quoted scalar.\n"

View File

@ -0,0 +1,15 @@
# Example 2.19 Integers
canonical: 12345
decimal: +12345
octal: 0o14
hexadecimal: 0xC
# Note: this example is not part of the spec but added for completeness.
#
# Octal numbers:
# - YAML 1.1: prefix is "0"
# - YAML 1.2: prefix is "0o"
# The "gopkg.in/yaml.v3" package accepts both for backwards compat.
# We accept only the YAML 1.2 prefix "0o".
octal11: 012345

View File

@ -0,0 +1,5 @@
# Example 2.2 Mapping Scalars to Scalars (player statistics)
hr: 65 # Home runs
avg: 0.278 # Batting average
rbi: 147 # Runs Batted In

View File

@ -0,0 +1,7 @@
# Example 2.20 Floating Point
canonical: 1.23015e+3
exponential: 12.3015e+02
fixed: 1230.15
negative infinity: -.inf
not a number: .nan

View File

@ -0,0 +1,5 @@
# Example 2.21 Miscellaneous
null:
booleans: [ true, false ]
string: '012345'

View File

@ -0,0 +1,6 @@
# Example 2.22 Timestamps
canonical: 2001-12-15T02:59:43.1Z
iso8601: 2001-12-14t21:59:43.10-05:00
spaced: 2001-12-14 21:59:43.10 -5
date: 2002-12-14

View File

@ -0,0 +1,15 @@
# Example 2.23 Various Explicit Tags
---
not-date: !!str 2002-04-28
picture: !!binary |
R0lGODlhDAAMAIQAAP//9/X
17unp5WZmZgAAAOfn515eXv
Pz7Y6OjuDg4J+fn5OTk6enp
56enmleECcgggoBADs=
application specific tag: !something |
The semantics of the tag
above may be different for
different documents.

View File

@ -0,0 +1,16 @@
# Example 2.24 Global Tags
%TAG ! tag:clarkevans.com,2002:
--- !shape
# Use the ! handle for presenting
# tag:clarkevans.com,2002:circle
- !circle
center: &ORIGIN {x: 73, y: 129}
radius: 7
- !line
start: *ORIGIN
finish: { x: 89, y: 102 }
- !label
start: *ORIGIN
color: 0xFFEEBB
text: Pretty vector drawing.

View File

@ -0,0 +1,9 @@
# Example 2.25 Unordered Sets
# Sets are represented as a
# Mapping where each key is
# associated with a null value
--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griffey

View File

@ -0,0 +1,9 @@
# Example 2.26 Ordered Mappings
# Ordered maps are represented as
# A sequence of mappings, with
# each mapping having one key
--- !!omap
- Mark McGwire: 65
- Sammy Sosa: 63
- Ken Griffey: 58

View File

@ -0,0 +1,31 @@
# Example 2.27 Invoice
--- !<tag:clarkevans.com,2002:invoice>
invoice: 34843
date : 2001-01-23
bill-to: &id001
given : Chris
family : Dumars
address:
lines: |
458 Walkman Dr.
Suite #292
city : Royal Oak
state : MI
postal : 48046
ship-to: *id001
product:
- sku : BL394D
quantity : 4
description : Basketball
price : 450.00
- sku : BL4438H
quantity : 1
description : Super Hoop
price : 2392.00
tax : 251.42
total: 4443.52
comments:
Late afternoon is best.
Backup contact is Nancy
Billsmer @ 338-4338.

View File

@ -0,0 +1,28 @@
# Example 2.28 Log File
---
Time: 2001-11-23 15:01:42 -5
User: ed
Warning:
This is an error message
for the log file
---
Time: 2001-11-23 15:02:31 -5
User: ed
Warning:
A slightly different error
message.
---
Date: 2001-11-23 15:03:17 -5
User: ed
Fatal:
Unknown variable "bar"
Stack:
- file: TopClass.py
line: 23
code: |
x = MoreObject("345\n")
- file: MoreClass.py
line: 58
code: |-
foo = bar

View File

@ -0,0 +1,10 @@
# Example 2.3 Mapping Scalars to Sequences (ball clubs in each league)
american:
- Boston Red Sox
- Detroit Tigers
- New York Yankees
national:
- New York Mets
- Chicago Cubs
- Atlanta Braves

View File

@ -0,0 +1,10 @@
# Example 2.4 Sequence of Mappings (players statistics)
-
name: Mark McGwire
hr: 65
avg: 0.278
-
name: Sammy Sosa
hr: 63
avg: 0.288

View File

@ -0,0 +1,5 @@
# Example 2.5 Sequence of Sequences
- [name , hr, avg ]
- [Mark McGwire, 65, 0.278]
- [Sammy Sosa , 63, 0.288]

View File

@ -0,0 +1,7 @@
# Example 2.6 Mapping of Mappings
Mark McGwire: {hr: 65, avg: 0.278}
Sammy Sosa: {
hr: 63,
avg: 0.288,
}

View File

@ -0,0 +1,12 @@
# Example 2.7 Two Documents in a Stream (each with a leading comment)
# Ranking of 1998 home runs
---
- Mark McGwire
- Sammy Sosa
- Ken Griffey
# Team ranking
---
- Chicago Cubs
- St Louis Cardinals

View File

@ -0,0 +1,12 @@
# Example 2.8 Play by Play Feed from a Game
---
time: 20:03:20
player: Sammy Sosa
action: strike (miss)
...
---
time: 20:03:47
player: Sammy Sosa
action: grand slam
...

View File

@ -0,0 +1,10 @@
# Example 2.9 Single Document with Two Comments
---
hr: # 1998 hr ranking
- Mark McGwire
- Sammy Sosa
# 1998 rbi ranking
rbi:
- Sammy Sosa
- Ken Griffey

View File

@ -0,0 +1,821 @@
package yamlloader_test
import (
"bytes"
"math"
"os"
"testing"
"github.com/databricks/cli/libs/dyn"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/stretchr/testify/require"
)
const NL = "\n"
func loadExample(t *testing.T, file string) dyn.Value {
input, err := os.ReadFile(file)
require.NoError(t, err)
self, err := yamlloader.LoadYAML(file, bytes.NewBuffer(input))
require.NoError(t, err)
return self
}
func TestYAMLSpecExample_2_1(t *testing.T) {
file := "testdata/spec_example_2.1.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 3, Column: 3}}),
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 4, Column: 3}}),
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 5, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_2(t *testing.T) {
file := "testdata/spec_example_2.2.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 3, Column: 6}}),
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 4, Column: 6}}),
"rbi": dyn.NewValue(147, []dyn.Location{{File: file, Line: 5, Column: 6}}),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_3(t *testing.T) {
file := "testdata/spec_example_2.3.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"american": dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Boston Red Sox", []dyn.Location{{File: file, Line: 4, Column: 3}}),
dyn.NewValue("Detroit Tigers", []dyn.Location{{File: file, Line: 5, Column: 3}}),
dyn.NewValue("New York Yankees", []dyn.Location{{File: file, Line: 6, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 1}},
),
"national": dyn.NewValue(
[]dyn.Value{
dyn.NewValue("New York Mets", []dyn.Location{{File: file, Line: 8, Column: 3}}),
dyn.NewValue("Chicago Cubs", []dyn.Location{{File: file, Line: 9, Column: 3}}),
dyn.NewValue("Atlanta Braves", []dyn.Location{{File: file, Line: 10, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 1}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_4(t *testing.T) {
file := "testdata/spec_example_2.4.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
map[string]dyn.Value{
"name": dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 4, Column: 9}}),
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 5, Column: 9}}),
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 6, Column: 9}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"name": dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 8, Column: 9}}),
"hr": dyn.NewValue(63, []dyn.Location{{File: file, Line: 9, Column: 9}}),
"avg": dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 10, Column: 9}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_5(t *testing.T) {
file := "testdata/spec_example_2.5.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
[]dyn.Value{
dyn.NewValue("name", []dyn.Location{{File: file, Line: 3, Column: 4}}),
dyn.NewValue("hr", []dyn.Location{{File: file, Line: 3, Column: 18}}),
dyn.NewValue("avg", []dyn.Location{{File: file, Line: 3, Column: 22}}),
},
[]dyn.Location{{File: file, Line: 3, Column: 3}},
),
dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 4, Column: 4}}),
dyn.NewValue(65, []dyn.Location{{File: file, Line: 4, Column: 18}}),
dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 4, Column: 22}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 3}},
),
dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 5, Column: 4}}),
dyn.NewValue(63, []dyn.Location{{File: file, Line: 5, Column: 18}}),
dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 5, Column: 22}}),
},
[]dyn.Location{{File: file, Line: 5, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_6(t *testing.T) {
file := "testdata/spec_example_2.6.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"Mark McGwire": dyn.NewValue(
map[string]dyn.Value{
"hr": dyn.NewValue(65, []dyn.Location{{File: file, Line: 3, Column: 20}}),
"avg": dyn.NewValue(0.278, []dyn.Location{{File: file, Line: 3, Column: 29}}),
},
[]dyn.Location{{File: file, Line: 3, Column: 15}},
),
"Sammy Sosa": dyn.NewValue(
map[string]dyn.Value{
"hr": dyn.NewValue(63, []dyn.Location{{File: file, Line: 5, Column: 9}}),
"avg": dyn.NewValue(0.288, []dyn.Location{{File: file, Line: 6, Column: 10}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 13}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_7(t *testing.T) {
file := "testdata/spec_example_2.7.yml"
self := loadExample(t, file)
// Note: we do not support multiple documents in a single YAML file.
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
"Mark McGwire",
[]dyn.Location{{File: file, Line: 5, Column: 3}},
),
dyn.NewValue(
"Sammy Sosa",
[]dyn.Location{{File: file, Line: 6, Column: 3}},
),
dyn.NewValue(
"Ken Griffey",
[]dyn.Location{{File: file, Line: 7, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 5, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_8(t *testing.T) {
file := "testdata/spec_example_2.8.yml"
self := loadExample(t, file)
// Note: we do not support multiple documents in a single YAML file.
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"time": dyn.NewValue("20:03:20", []dyn.Location{{File: file, Line: 4, Column: 7}}),
"player": dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 5, Column: 9}}),
"action": dyn.NewValue("strike (miss)", []dyn.Location{{File: file, Line: 6, Column: 9}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_9(t *testing.T) {
file := "testdata/spec_example_2.9.yml"
self := loadExample(t, file)
// Note: we do not support multiple documents in a single YAML file.
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"hr": dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 5, Column: 3}}),
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 6, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 5, Column: 1}},
),
"rbi": dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 9, Column: 3}}),
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 10, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 9, Column: 1}},
),
},
[]dyn.Location{{File: file, Line: 4, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_10(t *testing.T) {
file := "testdata/spec_example_2.10.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"hr": dyn.NewValue(
[]dyn.Value{
dyn.NewValue("Mark McGwire", []dyn.Location{{File: file, Line: 5, Column: 3}}),
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 7, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 5, Column: 1}},
),
"rbi": dyn.NewValue(
[]dyn.Value{
// The location for an anchored value refers to the anchor, not the reference.
// This is the same location as the anchor that appears in the "hr" mapping.
dyn.NewValue("Sammy Sosa", []dyn.Location{{File: file, Line: 7, Column: 3}}),
dyn.NewValue("Ken Griffey", []dyn.Location{{File: file, Line: 10, Column: 3}}),
},
[]dyn.Location{{File: file, Line: 9, Column: 1}},
),
},
[]dyn.Location{{File: file, Line: 4, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_11(t *testing.T) {
file := "testdata/spec_example_2.11.yml"
input, err := os.ReadFile(file)
require.NoError(t, err)
// Note: non-string mapping keys are not supported by "gopkg.in/yaml.v3".
_, err = yamlloader.LoadYAML(file, bytes.NewBuffer(input))
assert.ErrorContains(t, err, `: key is not a scalar`)
}
func TestYAMLSpecExample_2_12(t *testing.T) {
file := "testdata/spec_example_2.12.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
map[string]dyn.Value{
"item": dyn.NewValue("Super Hoop", []dyn.Location{{File: file, Line: 5, Column: 13}}),
"quantity": dyn.NewValue(1, []dyn.Location{{File: file, Line: 6, Column: 13}}),
},
[]dyn.Location{{File: file, Line: 5, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"item": dyn.NewValue("Basketball", []dyn.Location{{File: file, Line: 7, Column: 13}}),
"quantity": dyn.NewValue(4, []dyn.Location{{File: file, Line: 8, Column: 13}}),
},
[]dyn.Location{{File: file, Line: 7, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"item": dyn.NewValue("Big Shoes", []dyn.Location{{File: file, Line: 9, Column: 13}}),
"quantity": dyn.NewValue(1, []dyn.Location{{File: file, Line: 10, Column: 13}}),
},
[]dyn.Location{{File: file, Line: 9, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 5, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_13(t *testing.T) {
file := "testdata/spec_example_2.13.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
``+
`\//||\/||`+NL+
"// || ||__"+NL,
[]dyn.Location{{File: file, Line: 4, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_14(t *testing.T) {
file := "testdata/spec_example_2.14.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
`Mark McGwire's year was crippled by a knee injury.`+NL,
[]dyn.Location{{File: file, Line: 3, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_15(t *testing.T) {
file := "testdata/spec_example_2.15.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
``+
`Sammy Sosa completed another fine season with great stats.`+NL+
NL+
` 63 Home Runs`+NL+
` 0.288 Batting Average`+NL+
NL+
`What a year!`+NL,
[]dyn.Location{{File: file, Line: 3, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_16(t *testing.T) {
file := "testdata/spec_example_2.16.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"name": dyn.NewValue(
"Mark McGwire",
[]dyn.Location{{File: file, Line: 3, Column: 7}},
),
"accomplishment": dyn.NewValue(
`Mark set a major league home run record in 1998.`+NL,
[]dyn.Location{{File: file, Line: 4, Column: 17}},
),
"stats": dyn.NewValue(
``+
`65 Home Runs`+NL+
`0.278 Batting Average`+NL,
[]dyn.Location{{File: file, Line: 7, Column: 8}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_17(t *testing.T) {
file := "testdata/spec_example_2.17.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"unicode": dyn.NewValue(
`Sosa did fine.`+"\u263A",
[]dyn.Location{{File: file, Line: 3, Column: 10}},
),
"control": dyn.NewValue(
"\b1998\t1999\t2000\n",
[]dyn.Location{{File: file, Line: 4, Column: 10}},
),
"hex esc": dyn.NewValue(
"\x0d\x0a is \r\n",
[]dyn.Location{{File: file, Line: 5, Column: 10}},
),
"single": dyn.NewValue(
`"Howdy!" he cried.`,
[]dyn.Location{{File: file, Line: 7, Column: 9}},
),
"quoted": dyn.NewValue(
` # Not a 'comment'.`,
[]dyn.Location{{File: file, Line: 8, Column: 9}},
),
"tie-fighter": dyn.NewValue(
`|\-*-/|`,
[]dyn.Location{{File: file, Line: 9, Column: 14}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_18(t *testing.T) {
file := "testdata/spec_example_2.18.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"plain": dyn.NewValue(
`This unquoted scalar spans many lines.`,
[]dyn.Location{{File: file, Line: 4, Column: 3}},
),
"quoted": dyn.NewValue(
`So does this quoted scalar.`+NL,
[]dyn.Location{{File: file, Line: 7, Column: 9}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_19(t *testing.T) {
file := "testdata/spec_example_2.19.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"canonical": dyn.NewValue(
12345,
[]dyn.Location{{File: file, Line: 3, Column: 12}},
),
"decimal": dyn.NewValue(
12345,
[]dyn.Location{{File: file, Line: 4, Column: 10}},
),
"octal": dyn.NewValue(
12,
[]dyn.Location{{File: file, Line: 5, Column: 8}},
),
"hexadecimal": dyn.NewValue(
12,
[]dyn.Location{{File: file, Line: 6, Column: 14}},
),
"octal11": dyn.NewValue(
12345,
[]dyn.Location{{File: file, Line: 15, Column: 10}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_20(t *testing.T) {
file := "testdata/spec_example_2.20.yml"
self := loadExample(t, file)
// Equality assertion doesn't work with NaNs.
// See https://github.com/stretchr/testify/issues/624.
//
// Remove the NaN entry.
self, _ = dyn.Walk(self, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
if f, ok := v.AsFloat(); ok && math.IsNaN(f) {
return dyn.InvalidValue, dyn.ErrDrop
}
return v, nil
})
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"canonical": dyn.NewValue(
1230.15,
[]dyn.Location{{File: file, Line: 3, Column: 12}},
),
"exponential": dyn.NewValue(
1230.15,
[]dyn.Location{{File: file, Line: 4, Column: 14}},
),
"fixed": dyn.NewValue(
1230.15,
[]dyn.Location{{File: file, Line: 5, Column: 8}},
),
"negative infinity": dyn.NewValue(
math.Inf(-1),
[]dyn.Location{{File: file, Line: 6, Column: 20}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_21(t *testing.T) {
file := "testdata/spec_example_2.21.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"null": dyn.NewValue(
nil,
[]dyn.Location{{File: file, Line: 3, Column: 6}},
),
"booleans": dyn.NewValue(
[]dyn.Value{
dyn.NewValue(true, []dyn.Location{{File: file, Line: 4, Column: 13}}),
dyn.NewValue(false, []dyn.Location{{File: file, Line: 4, Column: 19}}),
},
[]dyn.Location{{File: file, Line: 4, Column: 11}},
),
"string": dyn.NewValue(
"012345",
[]dyn.Location{{File: file, Line: 5, Column: 9}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_22(t *testing.T) {
file := "testdata/spec_example_2.22.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"canonical": dyn.NewValue(
dyn.MustTime("2001-12-15T02:59:43.1Z"),
[]dyn.Location{{File: file, Line: 3, Column: 12}},
),
"iso8601": dyn.NewValue(
dyn.MustTime("2001-12-14t21:59:43.10-05:00"),
[]dyn.Location{{File: file, Line: 4, Column: 10}},
),
"spaced": dyn.NewValue(
// This is parsed as a string, not a timestamp,
// both by "gopkg.in/yaml.v3" and by our implementation.
"2001-12-14 21:59:43.10 -5",
[]dyn.Location{{File: file, Line: 5, Column: 9}},
),
"date": dyn.NewValue(
dyn.MustTime("2002-12-14"),
[]dyn.Location{{File: file, Line: 6, Column: 7}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 1}},
), self)
}
func TestYAMLSpecExample_2_23(t *testing.T) {
file := "testdata/spec_example_2.23.yml"
input, err := os.ReadFile(file)
require.NoError(t, err)
// Note: the !!binary tag is not supported by us.
_, err = yamlloader.LoadYAML(file, bytes.NewBuffer(input))
assert.ErrorContains(t, err, `: unknown tag: !!binary`)
}
func TestYAMLSpecExample_2_24(t *testing.T) {
file := "testdata/spec_example_2.24.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
map[string]dyn.Value{
"center": dyn.NewValue(
map[string]dyn.Value{
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 11}},
),
"radius": dyn.NewValue(7, []dyn.Location{{File: file, Line: 9, Column: 11}}),
},
[]dyn.Location{{File: file, Line: 7, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"start": dyn.NewValue(
map[string]dyn.Value{
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 11}},
),
"finish": dyn.NewValue(
map[string]dyn.Value{
"x": dyn.NewValue(89, []dyn.Location{{File: file, Line: 12, Column: 16}}),
"y": dyn.NewValue(102, []dyn.Location{{File: file, Line: 12, Column: 23}}),
},
[]dyn.Location{{File: file, Line: 12, Column: 11}},
),
},
[]dyn.Location{{File: file, Line: 10, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"start": dyn.NewValue(
map[string]dyn.Value{
"x": dyn.NewValue(73, []dyn.Location{{File: file, Line: 8, Column: 23}}),
"y": dyn.NewValue(129, []dyn.Location{{File: file, Line: 8, Column: 30}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 11}},
),
"color": dyn.NewValue(16772795, []dyn.Location{{File: file, Line: 15, Column: 10}}),
"text": dyn.NewValue("Pretty vector drawing.", []dyn.Location{{File: file, Line: 16, Column: 9}}),
},
[]dyn.Location{{File: file, Line: 13, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 4, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_25(t *testing.T) {
file := "testdata/spec_example_2.25.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"Mark McGwire": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 8, Column: 1}}),
"Sammy Sosa": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 9, Column: 1}}),
"Ken Griffey": dyn.NewValue(nil, []dyn.Location{{File: file, Line: 10, Column: 1}}),
},
[]dyn.Location{{File: file, Line: 6, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_26(t *testing.T) {
file := "testdata/spec_example_2.26.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
map[string]dyn.Value{
"Mark McGwire": dyn.NewValue(65, []dyn.Location{{File: file, Line: 7, Column: 17}}),
},
[]dyn.Location{{File: file, Line: 7, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"Sammy Sosa": dyn.NewValue(63, []dyn.Location{{File: file, Line: 8, Column: 15}}),
},
[]dyn.Location{{File: file, Line: 8, Column: 3}},
),
dyn.NewValue(
map[string]dyn.Value{
"Ken Griffey": dyn.NewValue(58, []dyn.Location{{File: file, Line: 9, Column: 16}}),
},
[]dyn.Location{{File: file, Line: 9, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 6, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_27(t *testing.T) {
file := "testdata/spec_example_2.27.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"invoice": dyn.NewValue(
34843,
[]dyn.Location{{File: file, Line: 4, Column: 10}},
),
"date": dyn.NewValue(
dyn.MustTime("2001-01-23"),
[]dyn.Location{{File: file, Line: 5, Column: 10}},
),
"bill-to": dyn.NewValue(
map[string]dyn.Value{
"given": dyn.NewValue(
"Chris",
[]dyn.Location{{File: file, Line: 7, Column: 12}},
),
"family": dyn.NewValue(
"Dumars",
[]dyn.Location{{File: file, Line: 8, Column: 12}},
),
"address": dyn.NewValue(
map[string]dyn.Value{
"lines": dyn.NewValue(
"458 Walkman Dr.\nSuite #292\n",
[]dyn.Location{{File: file, Line: 10, Column: 12}},
),
"city": dyn.NewValue(
"Royal Oak",
[]dyn.Location{{File: file, Line: 13, Column: 15}},
),
"state": dyn.NewValue(
"MI",
[]dyn.Location{{File: file, Line: 14, Column: 15}},
),
"postal": dyn.NewValue(
48046,
[]dyn.Location{{File: file, Line: 15, Column: 15}},
),
},
[]dyn.Location{{File: file, Line: 10, Column: 5}},
),
},
[]dyn.Location{{File: file, Line: 6, Column: 10}},
),
"ship-to": dyn.NewValue(
map[string]dyn.Value{
"given": dyn.NewValue(
"Chris",
[]dyn.Location{{File: file, Line: 7, Column: 12}},
),
"family": dyn.NewValue(
"Dumars",
[]dyn.Location{{File: file, Line: 8, Column: 12}},
),
"address": dyn.NewValue(
map[string]dyn.Value{
"lines": dyn.NewValue(
"458 Walkman Dr.\nSuite #292\n",
[]dyn.Location{{File: file, Line: 10, Column: 12}},
),
"city": dyn.NewValue(
"Royal Oak",
[]dyn.Location{{File: file, Line: 13, Column: 15}},
),
"state": dyn.NewValue(
"MI",
[]dyn.Location{{File: file, Line: 14, Column: 15}},
),
"postal": dyn.NewValue(
48046,
[]dyn.Location{{File: file, Line: 15, Column: 15}},
),
},
[]dyn.Location{{File: file, Line: 10, Column: 5}},
),
},
[]dyn.Location{{File: file, Line: 6, Column: 10}},
),
"product": dyn.NewValue(
[]dyn.Value{
dyn.NewValue(
map[string]dyn.Value{
"sku": dyn.NewValue(
"BL394D",
[]dyn.Location{{File: file, Line: 18, Column: 17}},
),
"quantity": dyn.NewValue(
4,
[]dyn.Location{{File: file, Line: 19, Column: 17}},
),
"description": dyn.NewValue(
"Basketball",
[]dyn.Location{{File: file, Line: 20, Column: 17}},
),
"price": dyn.NewValue(
450.0,
[]dyn.Location{{File: file, Line: 21, Column: 17}},
),
},
[]dyn.Location{{File: file, Line: 18, Column: 3}},
), dyn.NewValue(
map[string]dyn.Value{
"sku": dyn.NewValue(
"BL4438H",
[]dyn.Location{{File: file, Line: 22, Column: 17}},
),
"quantity": dyn.NewValue(
1,
[]dyn.Location{{File: file, Line: 23, Column: 17}},
),
"description": dyn.NewValue(
"Super Hoop",
[]dyn.Location{{File: file, Line: 24, Column: 17}},
),
"price": dyn.NewValue(
2392.0,
[]dyn.Location{{File: file, Line: 25, Column: 17}},
),
},
[]dyn.Location{{File: file, Line: 22, Column: 3}},
)},
[]dyn.Location{{File: file, Line: 18, Column: 1}},
),
"tax": dyn.NewValue(
251.42,
[]dyn.Location{{File: file, Line: 26, Column: 8}},
),
"total": dyn.NewValue(
4443.52,
[]dyn.Location{{File: file, Line: 27, Column: 8}},
),
"comments": dyn.NewValue(
"Late afternoon is best. Backup contact is Nancy Billsmer @ 338-4338.",
[]dyn.Location{{File: file, Line: 29, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 3, Column: 5}},
), self)
}
func TestYAMLSpecExample_2_28(t *testing.T) {
file := "testdata/spec_example_2.28.yml"
self := loadExample(t, file)
assert.Equal(t, dyn.NewValue(
map[string]dyn.Value{
"Time": dyn.NewValue(
"2001-11-23 15:01:42 -5",
[]dyn.Location{{File: file, Line: 4, Column: 7}},
),
"User": dyn.NewValue(
"ed",
[]dyn.Location{{File: file, Line: 5, Column: 7}},
),
"Warning": dyn.NewValue(
"This is an error message for the log file",
[]dyn.Location{{File: file, Line: 7, Column: 3}},
),
},
[]dyn.Location{{File: file, Line: 4, Column: 1}},
), self)
}

View File

@ -23,8 +23,21 @@ type Repository struct {
	// directory where we process .gitignore files.
	real bool

-	// root is the absolute path to the repository root.
-	root vfs.Path
+	// rootDir is the path to the root of the repository checkout.
+	// This can be either the main repository checkout or a worktree checkout.
+	// For more information about worktrees, see: https://git-scm.com/docs/git-worktree#_description.
+	rootDir vfs.Path
+
+	// gitDir is the equivalent of $GIT_DIR and points to the
+	// `.git` directory of a repository or a worktree directory.
+	// See https://git-scm.com/docs/git-worktree#_details for more information.
+	gitDir vfs.Path
+
+	// gitCommonDir is the equivalent of $GIT_COMMON_DIR and points to the
+	// `.git` directory of the main working tree (common between worktrees).
+	// This is equivalent to [gitDir] if this is the main working tree.
+	// See https://git-scm.com/docs/git-worktree#_details for more information.
+	gitCommonDir vfs.Path

	// ignore contains a list of ignore patterns indexed by the
	// path prefix relative to the repository root.
@ -44,12 +57,11 @@ type Repository struct {

// Root returns the absolute path to the repository root.
func (r *Repository) Root() string {
-	return r.root.Native()
+	return r.rootDir.Native()
}

func (r *Repository) CurrentBranch() (string, error) {
-	// load .git/HEAD
-	ref, err := LoadReferenceFile(r.root, path.Join(GitDirectoryName, "HEAD"))
+	ref, err := LoadReferenceFile(r.gitDir, "HEAD")
	if err != nil {
		return "", err
	}
@ -65,8 +77,7 @@ func (r *Repository) CurrentBranch() (string, error) {
}

func (r *Repository) LatestCommit() (string, error) {
-	// load .git/HEAD
-	ref, err := LoadReferenceFile(r.root, path.Join(GitDirectoryName, "HEAD"))
+	ref, err := LoadReferenceFile(r.gitDir, "HEAD")
	if err != nil {
		return "", err
	}
@ -80,12 +91,12 @@ func (r *Repository) LatestCommit() (string, error) {
		return ref.Content, nil
	}

-	// read reference from .git/HEAD
+	// Read reference from $GIT_DIR/HEAD
	branchHeadPath, err := ref.ResolvePath()
	if err != nil {
		return "", err
	}
-	branchHeadRef, err := LoadReferenceFile(r.root, path.Join(GitDirectoryName, branchHeadPath))
+	branchHeadRef, err := LoadReferenceFile(r.gitCommonDir, branchHeadPath)
	if err != nil {
		return "", err
	}
@ -125,7 +136,7 @@ func (r *Repository) loadConfig() error {
	if err != nil {
		return fmt.Errorf("unable to load user specific gitconfig: %w", err)
	}
-	err = config.loadFile(r.root, ".git/config")
+	err = config.loadFile(r.gitCommonDir, "config")
	if err != nil {
		return fmt.Errorf("unable to load repository specific gitconfig: %w", err)
	}
@ -133,12 +144,6 @@ func (r *Repository) loadConfig() error {
	return nil
}

-// newIgnoreFile constructs a new [ignoreRules] implementation backed by
-// a file using the specified path relative to the repository root.
-func (r *Repository) newIgnoreFile(relativeIgnoreFilePath string) ignoreRules {
-	return newIgnoreFile(r.root, relativeIgnoreFilePath)
-}
-
// getIgnoreRules returns a slice of [ignoreRules] that apply
// for the specified prefix. The prefix must be cleaned by the caller.
// It lazily initializes an entry for the specified prefix if it
@ -149,7 +154,7 @@ func (r *Repository) getIgnoreRules(prefix string) []ignoreRules {
		return fs
	}

-	r.ignore[prefix] = append(r.ignore[prefix], r.newIgnoreFile(path.Join(prefix, gitIgnoreFileName)))
+	r.ignore[prefix] = append(r.ignore[prefix], newIgnoreFile(r.rootDir, path.Join(prefix, gitIgnoreFileName)))
	return r.ignore[prefix]
}
@ -205,21 +210,30 @@ func (r *Repository) Ignore(relPath string) (bool, error) {
func NewRepository(path vfs.Path) (*Repository, error) {
	real := true
-	rootPath, err := vfs.FindLeafInTree(path, GitDirectoryName)
+	rootDir, err := vfs.FindLeafInTree(path, GitDirectoryName)
	if err != nil {
		if !errors.Is(err, fs.ErrNotExist) {
			return nil, err
		}

		// Cannot find `.git` directory.
-		// Treat the specified path as a potential repository root.
+		// Treat the specified path as a potential repository root checkout.
		real = false
-		rootPath = path
+		rootDir = path
	}

+	// Derive $GIT_DIR and $GIT_COMMON_DIR paths if this is a real repository.
+	// If it isn't a real repository, they'll point to the (non-existent) `.git` directory.
+	gitDir, gitCommonDir, err := resolveGitDirs(rootDir)
+	if err != nil {
+		return nil, err
+	}
+
	repo := &Repository{
-		real:   real,
-		root:   rootPath,
-		ignore: make(map[string][]ignoreRules),
+		real:         real,
+		rootDir:      rootDir,
+		gitDir:       gitDir,
+		gitCommonDir: gitCommonDir,
+		ignore:       make(map[string][]ignoreRules),
	}

	err = repo.loadConfig()
@ -252,10 +266,10 @@ func NewRepository(path vfs.Path) (*Repository, error) {
		newStringIgnoreRules([]string{
			".git",
		}),
-		// Load repository-wide excludes file.
-		repo.newIgnoreFile(".git/info/excludes"),
+		// Load repository-wide exclude file.
+		newIgnoreFile(repo.gitCommonDir, "info/exclude"),
		// Load root gitignore file.
-		repo.newIgnoreFile(".gitignore"),
+		newIgnoreFile(repo.rootDir, ".gitignore"),
	}

	return repo, nil

View File

@ -80,7 +80,7 @@ func NewView(root vfs.Path) (*View, error) {
	// Target path must be relative to the repository root path.
	target := root.Native()
-	prefix := repo.root.Native()
+	prefix := repo.rootDir.Native()
	if !strings.HasPrefix(target, prefix) {
		return nil, fmt.Errorf("path %q is not within repository root %q", root.Native(), prefix)
	}

libs/git/worktree.go (new file, 123 lines)
View File

@ -0,0 +1,123 @@
package git
import (
"bufio"
"errors"
"fmt"
"io/fs"
"path/filepath"
"strings"
"github.com/databricks/cli/libs/vfs"
)
func readLines(root vfs.Path, name string) ([]string, error) {
file, err := root.Open(name)
if err != nil {
return nil, err
}
defer file.Close()
var lines []string
scanner := bufio.NewScanner(file)
for scanner.Scan() {
lines = append(lines, scanner.Text())
}
return lines, scanner.Err()
}
// readGitDir reads the value of the `.git` file in a worktree.
func readGitDir(root vfs.Path) (string, error) {
lines, err := readLines(root, GitDirectoryName)
if err != nil {
return "", err
}
var gitDir string
for _, line := range lines {
parts := strings.SplitN(line, ": ", 2)
if len(parts) != 2 {
continue
}
if parts[0] == "gitdir" {
gitDir = strings.TrimSpace(parts[1])
}
}
if gitDir == "" {
return "", fmt.Errorf(`expected %q to contain a line with "gitdir: [...]"`, filepath.Join(root.Native(), GitDirectoryName))
}
return gitDir, nil
}
// readGitCommonDir reads the value of the `commondir` file in the `.git` directory of a worktree.
// This file typically contains "../.." to point to $GIT_COMMON_DIR.
func readGitCommonDir(gitDir vfs.Path) (string, error) {
lines, err := readLines(gitDir, "commondir")
if err != nil {
return "", err
}
if len(lines) == 0 {
return "", errors.New("file is empty")
}
return strings.TrimSpace(lines[0]), nil
}
// resolveGitDirs resolves the paths for $GIT_DIR and $GIT_COMMON_DIR.
// The path argument is the root of the checkout where (supposedly) a `.git` file or directory exists.
func resolveGitDirs(root vfs.Path) (vfs.Path, vfs.Path, error) {
fileInfo, err := root.Stat(GitDirectoryName)
if err != nil {
// If the `.git` file or directory does not exist, then this is not a git repository.
// Return paths that we know don't exist, so we do not need to perform nil checks in the caller.
if errors.Is(err, fs.ErrNotExist) {
gitDir := vfs.MustNew(filepath.Join(root.Native(), GitDirectoryName))
return gitDir, gitDir, nil
}
return nil, nil, err
}
// If the path is a directory, then it is the main working tree.
// Both $GIT_DIR and $GIT_COMMON_DIR point to the same directory.
if fileInfo.IsDir() {
gitDir := vfs.MustNew(filepath.Join(root.Native(), GitDirectoryName))
return gitDir, gitDir, nil
}
// If the path is not a directory, then it is a worktree.
// Read value for $GIT_DIR.
gitDirValue, err := readGitDir(root)
if err != nil {
return nil, nil, err
}
// Resolve $GIT_DIR.
var gitDir vfs.Path
if filepath.IsAbs(gitDirValue) {
gitDir = vfs.MustNew(gitDirValue)
} else {
gitDir = vfs.MustNew(filepath.Join(root.Native(), gitDirValue))
}
// Read value for $GIT_COMMON_DIR.
gitCommonDirValue, err := readGitCommonDir(gitDir)
if err != nil {
return nil, nil, fmt.Errorf(`expected "commondir" file in worktree git folder at %q: %w`, gitDir.Native(), err)
}
// Resolve $GIT_COMMON_DIR.
var gitCommonDir vfs.Path
if filepath.IsAbs(gitCommonDirValue) {
gitCommonDir = vfs.MustNew(gitCommonDirValue)
} else {
gitCommonDir = vfs.MustNew(filepath.Join(gitDir.Native(), gitCommonDirValue))
}
return gitDir, gitCommonDir, nil
}

libs/git/worktree_test.go (new file, 108 lines)
View File

@ -0,0 +1,108 @@
package git
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/databricks/cli/libs/vfs"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func setupWorktree(t *testing.T) string {
var err error
tmpDir := t.TempDir()
// Checkout path
err = os.MkdirAll(filepath.Join(tmpDir, "my_worktree"), os.ModePerm)
require.NoError(t, err)
// Main $GIT_COMMON_DIR
err = os.MkdirAll(filepath.Join(tmpDir, ".git"), os.ModePerm)
require.NoError(t, err)
// Worktree $GIT_DIR
err = os.MkdirAll(filepath.Join(tmpDir, ".git/worktrees/my_worktree"), os.ModePerm)
require.NoError(t, err)
return tmpDir
}
func writeGitDir(t *testing.T, dir, content string) {
err := os.WriteFile(filepath.Join(dir, "my_worktree/.git"), []byte(content), os.ModePerm)
require.NoError(t, err)
}
func writeGitCommonDir(t *testing.T, dir, content string) {
err := os.WriteFile(filepath.Join(dir, ".git/worktrees/my_worktree/commondir"), []byte(content), os.ModePerm)
require.NoError(t, err)
}
func verifyCorrectDirs(t *testing.T, dir string) {
gitDir, gitCommonDir, err := resolveGitDirs(vfs.MustNew(filepath.Join(dir, "my_worktree")))
require.NoError(t, err)
assert.Equal(t, filepath.Join(dir, ".git/worktrees/my_worktree"), gitDir.Native())
assert.Equal(t, filepath.Join(dir, ".git"), gitCommonDir.Native())
}
func TestWorktreeResolveGitDir(t *testing.T) {
dir := setupWorktree(t)
writeGitCommonDir(t, dir, "../..")
t.Run("relative", func(t *testing.T) {
writeGitDir(t, dir, fmt.Sprintf("gitdir: %s", "../.git/worktrees/my_worktree"))
verifyCorrectDirs(t, dir)
})
t.Run("absolute", func(t *testing.T) {
writeGitDir(t, dir, fmt.Sprintf("gitdir: %s", filepath.Join(dir, ".git/worktrees/my_worktree")))
verifyCorrectDirs(t, dir)
})
t.Run("additional spaces", func(t *testing.T) {
writeGitDir(t, dir, fmt.Sprintf("gitdir: %s \n\n\n", "../.git/worktrees/my_worktree"))
verifyCorrectDirs(t, dir)
})
t.Run("empty", func(t *testing.T) {
writeGitDir(t, dir, "")
_, _, err := resolveGitDirs(vfs.MustNew(filepath.Join(dir, "my_worktree")))
assert.ErrorContains(t, err, ` to contain a line with "gitdir: [...]"`)
})
}
func TestWorktreeResolveCommonDir(t *testing.T) {
dir := setupWorktree(t)
writeGitDir(t, dir, fmt.Sprintf("gitdir: %s", "../.git/worktrees/my_worktree"))
t.Run("relative", func(t *testing.T) {
writeGitCommonDir(t, dir, "../..")
verifyCorrectDirs(t, dir)
})
t.Run("absolute", func(t *testing.T) {
writeGitCommonDir(t, dir, filepath.Join(dir, ".git"))
verifyCorrectDirs(t, dir)
})
t.Run("additional spaces", func(t *testing.T) {
writeGitCommonDir(t, dir, " ../.. \n\n\n")
verifyCorrectDirs(t, dir)
})
t.Run("empty", func(t *testing.T) {
writeGitCommonDir(t, dir, "")
_, _, err := resolveGitDirs(vfs.MustNew(filepath.Join(dir, "my_worktree")))
assert.ErrorContains(t, err, `expected "commondir" file in worktree git folder at `)
})
t.Run("missing", func(t *testing.T) {
_, _, err := resolveGitDirs(vfs.MustNew(filepath.Join(dir, "my_worktree")))
assert.ErrorContains(t, err, `expected "commondir" file in worktree git folder at `)
})
}
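For intuition, the indirection these tests exercise can be reproduced with the standard library alone. A hypothetical sketch (paths invented, errors elided) that follows the same two hops as resolveGitDirs: the worktree's ".git" file points at $GIT_DIR, and $GIT_DIR/commondir points back at the shared ".git" directory:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	tmp, _ := os.MkdirTemp("", "worktree")
	defer os.RemoveAll(tmp)

	// Layout: <tmp>/.git/worktrees/wt is $GIT_DIR, <tmp>/wt is the worktree checkout.
	os.MkdirAll(filepath.Join(tmp, ".git/worktrees/wt"), 0o755)
	os.MkdirAll(filepath.Join(tmp, "wt"), 0o755)
	os.WriteFile(filepath.Join(tmp, "wt/.git"), []byte("gitdir: ../.git/worktrees/wt\n"), 0o644)
	os.WriteFile(filepath.Join(tmp, ".git/worktrees/wt/commondir"), []byte("../..\n"), 0o644)

	// Hop 1: read the "gitdir: ..." pointer from the worktree's ".git" file.
	b, _ := os.ReadFile(filepath.Join(tmp, "wt/.git"))
	gitDir := filepath.Join(tmp, "wt", strings.TrimSpace(strings.TrimPrefix(string(b), "gitdir:")))

	// Hop 2: read "commondir" relative to $GIT_DIR to find the shared ".git".
	b, _ = os.ReadFile(filepath.Join(gitDir, "commondir"))
	gitCommonDir := filepath.Join(gitDir, strings.TrimSpace(string(b)))

	fmt.Println("gitDir:      ", gitDir)       // <tmp>/.git/worktrees/wt
	fmt.Println("gitCommonDir:", gitCommonDir) // <tmp>/.git
}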

View File

@ -3,7 +3,7 @@ resources:
  pipelines:
    {{.project_name}}_pipeline:
      name: {{.project_name}}_pipeline
-      {{- if eq default_catalog ""}}
+      {{- if or (eq default_catalog "") (eq default_catalog "hive_metastore")}}
      ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
      # catalog: catalog_name
      {{- else}}