Generate correct YAML if custom_tags or spark_conf is used for pipeline or job cluster configuration (#1210)

These fields' keys and values need to be double quoted so that the YAML loader can
read, parse, and unmarshal them into the Go struct correctly, because these fields
are of type `map[string]string`.
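
As an illustration of the failure mode, here is a minimal sketch that uses plain `gopkg.in/yaml.v3` as a stand-in for the bundle's loader (the `clusterConf` struct is hypothetical): an unquoted value such as `true` resolves to a YAML boolean and cannot be unmarshalled into a `map[string]string`, while the double-quoted form decodes cleanly.

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// clusterConf mimics a config struct whose spark_conf field is map[string]string.
type clusterConf struct {
	SparkConf map[string]string `yaml:"spark_conf"`
}

func main() {
	unquoted := "spark_conf:\n  spark.databricks.delta.preview.enabled: true\n"
	quoted := "spark_conf:\n  \"spark.databricks.delta.preview.enabled\": \"true\"\n"

	var c clusterConf
	// Fails: the bare `true` resolves to !!bool, which cannot be decoded into a string value.
	fmt.Println(yaml.Unmarshal([]byte(unquoted), &c))
	// Succeeds: double quoting forces both key and value to be read as plain strings.
	fmt.Println(yaml.Unmarshal([]byte(quoted), &c), c.SparkConf)
}
```

The same applies to values that look like numbers (`1.0`, `0x123`, `0b101`), which is why the generated YAML now double quotes everything under these keys.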

## Tests
Added regression unit and E2E tests
Commit e474948a4b (parent 299e9b56a6), authored by Andrew Nester on 2024-02-15 16:03:19 +01:00, committed by GitHub
7 changed files with 292 additions and 39 deletions

View File

@@ -12,6 +12,8 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/databricks/databricks-sdk-go/service/workspace"
"github.com/stretchr/testify/mock"
@@ -36,6 +38,18 @@ func TestGeneratePipelineCommand(t *testing.T) {
Name: "test-pipeline",
Spec: &pipelines.PipelineSpec{
Name: "test-pipeline",
Clusters: []pipelines.PipelineCluster{
{
CustomTags: map[string]string{
"Tag1": "24X7-1234",
},
},
{
SparkConf: map[string]string{
"spark.databricks.delta.preview.enabled": "true",
},
},
},
Libraries: []pipelines.PipelineLibrary{
{Notebook: &pipelines.NotebookLibrary{
Path: "/test/notebook",
@@ -85,6 +99,11 @@ func TestGeneratePipelineCommand(t *testing.T) {
pipelines:
test_pipeline:
name: test-pipeline
clusters:
- custom_tags:
"Tag1": "24X7-1234"
- spark_conf:
"spark.databricks.delta.preview.enabled": "true"
libraries:
- notebook:
path: %s
@@ -100,3 +119,93 @@ func TestGeneratePipelineCommand(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "Py content", string(data))
}
func TestGenerateJobCommand(t *testing.T) {
cmd := NewGenerateJobCommand()
root := t.TempDir()
b := &bundle.Bundle{
Config: config.Root{
Path: root,
},
}
m := mocks.NewMockWorkspaceClient(t)
b.SetWorkpaceClient(m.WorkspaceClient)
jobsApi := m.GetMockJobsAPI()
jobsApi.EXPECT().Get(mock.Anything, jobs.GetJobRequest{JobId: 1234}).Return(&jobs.Job{
Settings: &jobs.JobSettings{
Name: "test-job",
JobClusters: []jobs.JobCluster{
{NewCluster: &compute.ClusterSpec{
CustomTags: map[string]string{
"Tag1": "24X7-1234",
},
}},
{NewCluster: &compute.ClusterSpec{
SparkConf: map[string]string{
"spark.databricks.delta.preview.enabled": "true",
},
}},
},
Tasks: []jobs.Task{
{
TaskKey: "notebook_task",
NotebookTask: &jobs.NotebookTask{
NotebookPath: "/test/notebook",
},
},
},
},
}, nil)
workspaceApi := m.GetMockWorkspaceAPI()
workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/test/notebook").Return(&workspace.ObjectInfo{
ObjectType: workspace.ObjectTypeNotebook,
Language: workspace.LanguagePython,
Path: "/test/notebook",
}, nil)
notebookContent := io.NopCloser(bytes.NewBufferString("# Databricks notebook source\nNotebook content"))
workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil)
cmd.SetContext(bundle.Context(context.Background(), b))
cmd.Flag("existing-job-id").Value.Set("1234")
configDir := filepath.Join(root, "resources")
cmd.Flag("config-dir").Value.Set(configDir)
srcDir := filepath.Join(root, "src")
cmd.Flag("source-dir").Value.Set(srcDir)
var key string
cmd.Flags().StringVar(&key, "key", "test_job", "")
err := cmd.RunE(cmd, []string{})
require.NoError(t, err)
data, err := os.ReadFile(filepath.Join(configDir, "test_job.yml"))
require.NoError(t, err)
require.Equal(t, fmt.Sprintf(`resources:
jobs:
test_job:
name: test-job
job_clusters:
- new_cluster:
custom_tags:
"Tag1": "24X7-1234"
- new_cluster:
spark_conf:
"spark.databricks.delta.preview.enabled": "true"
tasks:
- task_key: notebook_task
notebook_task:
notebook_path: %s
`, filepath.Join("..", "src", "notebook.py")), string(data))
data, err = os.ReadFile(filepath.Join(srcDir, "notebook.py"))
require.NoError(t, err)
require.Equal(t, "# Databricks notebook source\nNotebook content", string(data))
}

View File

@@ -14,6 +14,7 @@ import (
"github.com/databricks/cli/libs/textutil"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
)
func NewGenerateJobCommand() *cobra.Command {
@@ -82,7 +83,13 @@ func NewGenerateJobCommand() *cobra.Command {
}
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
err = yamlsaver.SaveAsYAML(result, filename, force)
saver := yamlsaver.NewSaverWithStyle(map[string]yaml.Style{
// Including all JobSettings and nested fields which are map[string]string type
"spark_conf": yaml.DoubleQuotedStyle,
"custom_tags": yaml.DoubleQuotedStyle,
"tags": yaml.DoubleQuotedStyle,
})
err = saver.SaveAsYAML(result, filename, force)
if err != nil {
return err
}

View File

@@ -14,6 +14,7 @@ import (
"github.com/databricks/cli/libs/textutil"
"github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
)
func NewGeneratePipelineCommand() *cobra.Command {
@@ -82,7 +83,15 @@ func NewGeneratePipelineCommand() *cobra.Command {
}
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
err = yamlsaver.SaveAsYAML(result, filename, force)
saver := yamlsaver.NewSaverWithStyle(
// Including all PipelineSpec and nested fields which are map[string]string type
map[string]yaml.Style{
"spark_conf": yaml.DoubleQuotedStyle,
"custom_tags": yaml.DoubleQuotedStyle,
"configuration": yaml.DoubleQuotedStyle,
},
)
err = saver.SaveAsYAML(result, filename, force)
if err != nil {
return err
}
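
Both generate commands now construct the saver with a key-to-style map, so every node nested under `spark_conf`, `custom_tags` and `tags` (jobs) or `spark_conf`, `custom_tags` and `configuration` (pipelines) is emitted with `yaml.DoubleQuotedStyle`. As a standalone illustration of what that style flag does to `gopkg.in/yaml.v3` output (a sketch, not the CLI's actual code path):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Scalars carrying DoubleQuotedStyle are rendered with double quotes; the
	// saver applies this style to everything nested under the configured keys.
	node := &yaml.Node{
		Kind: yaml.MappingNode,
		Content: []*yaml.Node{
			{Kind: yaml.ScalarNode, Value: "spark.databricks.delta.preview.enabled", Style: yaml.DoubleQuotedStyle},
			{Kind: yaml.ScalarNode, Value: "true", Style: yaml.DoubleQuotedStyle},
		},
	}
	out, err := yaml.Marshal(node)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // "spark.databricks.delta.preview.enabled": "true"
}
```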

View File

@@ -103,6 +103,11 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 {
SparkVersion: "13.3.x-scala2.12",
NumWorkers: 1,
NodeTypeId: nodeTypeId,
SparkConf: map[string]string{
"spark.databricks.enableWsfs": "true",
"spark.databricks.hive.metastore.glueCatalog.enabled": "true",
"spark.databricks.pip.ignoreSSL": "true",
},
},
NotebookTask: &jobs.NotebookTask{
NotebookPath: path.Join(tmpdir, "test"),

View File

@@ -94,6 +94,9 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')"))
require.NoError(t, err)
env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
nodeTypeId := internal.GetNodeTypeId(env)
name := internal.RandomName("generated-pipeline-")
resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{
Name: name,
@@ -109,6 +112,22 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string,
},
},
},
Clusters: []pipelines.PipelineCluster{
{
CustomTags: map[string]string{
"Tag1": "Yes",
"Tag2": "24X7",
"Tag3": "APP-1234",
},
NodeTypeId: nodeTypeId,
NumWorkers: 2,
SparkConf: map[string]string{
"spark.databricks.enableWsfs": "true",
"spark.databricks.hive.metastore.glueCatalog.enabled": "true",
"spark.databricks.pip.ignoreSSL": "true",
},
},
},
})
require.NoError(t, err)

View File

@@ -13,7 +13,21 @@ import (
"gopkg.in/yaml.v3"
)
func SaveAsYAML(data any, filename string, force bool) error {
type saver struct {
nodesWithStyle map[string]yaml.Style
}
func NewSaver() *saver {
return &saver{}
}
func NewSaverWithStyle(nodesWithStyle map[string]yaml.Style) *saver {
return &saver{
nodesWithStyle: nodesWithStyle,
}
}
func (s *saver) SaveAsYAML(data any, filename string, force bool) error {
err := os.MkdirAll(filepath.Dir(filename), 0755)
if err != nil {
return err
@@ -36,15 +50,15 @@ func SaveAsYAML(data any, filename string, force bool) error {
}
defer file.Close()
err = encode(data, file)
err = s.encode(data, file)
if err != nil {
return err
}
return nil
}
func encode(data any, w io.Writer) error {
yamlNode, err := ToYamlNode(dyn.V(data))
func (s *saver) encode(data any, w io.Writer) error {
yamlNode, err := s.toYamlNode(dyn.V(data))
if err != nil {
return err
}
@@ -53,7 +67,11 @@ func encode(data any, w io.Writer) error {
return enc.Encode(yamlNode)
}
func ToYamlNode(v dyn.Value) (*yaml.Node, error) {
func (s *saver) toYamlNode(v dyn.Value) (*yaml.Node, error) {
return s.toYamlNodeWithStyle(v, yaml.Style(0))
}
func (s *saver) toYamlNodeWithStyle(v dyn.Value, style yaml.Style) (*yaml.Node, error) {
switch v.Kind() {
case dyn.KindMap:
m, _ := v.AsMap()
@@ -68,8 +86,14 @@ func ToYamlNode(v dyn.Value) (*yaml.Node, error) {
content := make([]*yaml.Node, 0)
for _, k := range keys {
item := m[k]
node := yaml.Node{Kind: yaml.ScalarNode, Value: k}
c, err := ToYamlNode(item)
node := yaml.Node{Kind: yaml.ScalarNode, Value: k, Style: style}
var nestedNodeStyle yaml.Style
if customStyle, ok := s.hasStyle(k); ok {
nestedNodeStyle = customStyle
} else {
nestedNodeStyle = style
}
c, err := s.toYamlNodeWithStyle(item, nestedNodeStyle)
if err != nil {
return nil, err
}
@@ -77,40 +101,45 @@ func ToYamlNode(v dyn.Value) (*yaml.Node, error) {
content = append(content, c)
}
return &yaml.Node{Kind: yaml.MappingNode, Content: content}, nil
return &yaml.Node{Kind: yaml.MappingNode, Content: content, Style: style}, nil
case dyn.KindSequence:
s, _ := v.AsSequence()
seq, _ := v.AsSequence()
content := make([]*yaml.Node, 0)
for _, item := range s {
node, err := ToYamlNode(item)
for _, item := range seq {
node, err := s.toYamlNodeWithStyle(item, style)
if err != nil {
return nil, err
}
content = append(content, node)
}
return &yaml.Node{Kind: yaml.SequenceNode, Content: content}, nil
return &yaml.Node{Kind: yaml.SequenceNode, Content: content, Style: style}, nil
case dyn.KindNil:
return &yaml.Node{Kind: yaml.ScalarNode, Value: "null"}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: "null", Style: style}, nil
case dyn.KindString:
// If the string is a scalar value (bool, int, float and etc.), we want to quote it.
if isScalarValueInString(v) {
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustString(), Style: yaml.DoubleQuotedStyle}, nil
}
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustString()}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustString(), Style: style}, nil
case dyn.KindBool:
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustBool())}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustBool()), Style: style}, nil
case dyn.KindInt:
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustInt())}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustInt()), Style: style}, nil
case dyn.KindFloat:
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustFloat())}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustFloat()), Style: style}, nil
case dyn.KindTime:
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().UTC().String()}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().UTC().String(), Style: style}, nil
default:
// Panic because we only want to deal with known types.
panic(fmt.Sprintf("invalid kind: %d", v.Kind()))
}
}
func (s *saver) hasStyle(key string) (yaml.Style, bool) {
style, ok := s.nodesWithStyle[key]
return style, ok
}
func isScalarValueInString(v dyn.Value) bool {
if v.Kind() != dyn.KindString {
return false

View File

@@ -10,45 +10,51 @@ import (
)
func TestMarshalNilValue(t *testing.T) {
s := NewSaver()
var nilValue = dyn.NilValue
v, err := ToYamlNode(nilValue)
v, err := s.toYamlNode(nilValue)
assert.NoError(t, err)
assert.Equal(t, "null", v.Value)
}
func TestMarshalIntValue(t *testing.T) {
s := NewSaver()
var intValue = dyn.NewValue(1, dyn.Location{})
v, err := ToYamlNode(intValue)
v, err := s.toYamlNode(intValue)
assert.NoError(t, err)
assert.Equal(t, "1", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestMarshalFloatValue(t *testing.T) {
s := NewSaver()
var floatValue = dyn.NewValue(1.0, dyn.Location{})
v, err := ToYamlNode(floatValue)
v, err := s.toYamlNode(floatValue)
assert.NoError(t, err)
assert.Equal(t, "1", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestMarshalBoolValue(t *testing.T) {
s := NewSaver()
var boolValue = dyn.NewValue(true, dyn.Location{})
v, err := ToYamlNode(boolValue)
v, err := s.toYamlNode(boolValue)
assert.NoError(t, err)
assert.Equal(t, "true", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestMarshalTimeValue(t *testing.T) {
s := NewSaver()
var timeValue = dyn.NewValue(time.Unix(0, 0), dyn.Location{})
v, err := ToYamlNode(timeValue)
v, err := s.toYamlNode(timeValue)
assert.NoError(t, err)
assert.Equal(t, "1970-01-01 00:00:00 +0000 UTC", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestMarshalSequenceValue(t *testing.T) {
s := NewSaver()
var sequenceValue = dyn.NewValue(
[]dyn.Value{
dyn.NewValue("value1", dyn.Location{File: "file", Line: 1, Column: 2}),
@@ -56,7 +62,7 @@ func TestMarshalSequenceValue(t *testing.T) {
},
dyn.Location{File: "file", Line: 1, Column: 2},
)
v, err := ToYamlNode(sequenceValue)
v, err := s.toYamlNode(sequenceValue)
assert.NoError(t, err)
assert.Equal(t, yaml.SequenceNode, v.Kind)
assert.Equal(t, "value1", v.Content[0].Value)
@@ -64,14 +70,16 @@ func TestMarshalSequenceValue(t *testing.T) {
}
func TestMarshalStringValue(t *testing.T) {
s := NewSaver()
var stringValue = dyn.NewValue("value", dyn.Location{})
v, err := ToYamlNode(stringValue)
v, err := s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "value", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestMarshalMapValue(t *testing.T) {
s := NewSaver()
var mapValue = dyn.NewValue(
map[string]dyn.Value{
"key3": dyn.NewValue("value3", dyn.Location{File: "file", Line: 3, Column: 2}),
@@ -80,7 +88,7 @@ func TestMarshalMapValue(t *testing.T) {
},
dyn.Location{File: "file", Line: 1, Column: 2},
)
v, err := ToYamlNode(mapValue)
v, err := s.toYamlNode(mapValue)
assert.NoError(t, err)
assert.Equal(t, yaml.MappingNode, v.Kind)
assert.Equal(t, "key1", v.Content[0].Value)
@@ -94,6 +102,7 @@ func TestMarshalMapValue(t *testing.T) {
}
func TestMarshalNestedValues(t *testing.T) {
s := NewSaver()
var mapValue = dyn.NewValue(
map[string]dyn.Value{
"key1": dyn.NewValue(
@@ -105,7 +114,7 @@ func TestMarshalNestedValues(t *testing.T) {
},
dyn.Location{File: "file", Line: 1, Column: 2},
)
v, err := ToYamlNode(mapValue)
v, err := s.toYamlNode(mapValue)
assert.NoError(t, err)
assert.Equal(t, yaml.MappingNode, v.Kind)
assert.Equal(t, "key1", v.Content[0].Value)
@@ -115,15 +124,16 @@ func TestMarshalNestedValues(t *testing.T) {
}
func TestMarshalHexadecimalValueIsQuoted(t *testing.T) {
s := NewSaver()
var hexValue = dyn.NewValue(0x123, dyn.Location{})
v, err := ToYamlNode(hexValue)
v, err := s.toYamlNode(hexValue)
assert.NoError(t, err)
assert.Equal(t, "291", v.Value)
assert.Equal(t, yaml.Style(0), v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
var stringValue = dyn.NewValue("0x123", dyn.Location{})
v, err = ToYamlNode(stringValue)
v, err = s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "0x123", v.Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Style)
@@ -131,15 +141,16 @@ func TestMarshalHexadecimalValueIsQuoted(t *testing.T) {
}
func TestMarshalBinaryValueIsQuoted(t *testing.T) {
s := NewSaver()
var binaryValue = dyn.NewValue(0b101, dyn.Location{})
v, err := ToYamlNode(binaryValue)
v, err := s.toYamlNode(binaryValue)
assert.NoError(t, err)
assert.Equal(t, "5", v.Value)
assert.Equal(t, yaml.Style(0), v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
var stringValue = dyn.NewValue("0b101", dyn.Location{})
v, err = ToYamlNode(stringValue)
v, err = s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "0b101", v.Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Style)
@@ -147,15 +158,16 @@ func TestMarshalBinaryValueIsQuoted(t *testing.T) {
}
func TestMarshalOctalValueIsQuoted(t *testing.T) {
s := NewSaver()
var octalValue = dyn.NewValue(0123, dyn.Location{})
v, err := ToYamlNode(octalValue)
v, err := s.toYamlNode(octalValue)
assert.NoError(t, err)
assert.Equal(t, "83", v.Value)
assert.Equal(t, yaml.Style(0), v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
var stringValue = dyn.NewValue("0123", dyn.Location{})
v, err = ToYamlNode(stringValue)
v, err = s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "0123", v.Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Style)
@@ -163,15 +175,16 @@ func TestMarshalOctalValueIsQuoted(t *testing.T) {
}
func TestMarshalFloatValueIsQuoted(t *testing.T) {
s := NewSaver()
var floatValue = dyn.NewValue(1.0, dyn.Location{})
v, err := ToYamlNode(floatValue)
v, err := s.toYamlNode(floatValue)
assert.NoError(t, err)
assert.Equal(t, "1", v.Value)
assert.Equal(t, yaml.Style(0), v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
var stringValue = dyn.NewValue("1.0", dyn.Location{})
v, err = ToYamlNode(stringValue)
v, err = s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "1.0", v.Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Style)
@@ -179,17 +192,79 @@ func TestMarshalFloatValueIsQuoted(t *testing.T) {
}
func TestMarshalBoolValueIsQuoted(t *testing.T) {
s := NewSaver()
var boolValue = dyn.NewValue(true, dyn.Location{})
v, err := ToYamlNode(boolValue)
v, err := s.toYamlNode(boolValue)
assert.NoError(t, err)
assert.Equal(t, "true", v.Value)
assert.Equal(t, yaml.Style(0), v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
var stringValue = dyn.NewValue("true", dyn.Location{})
v, err = ToYamlNode(stringValue)
v, err = s.toYamlNode(stringValue)
assert.NoError(t, err)
assert.Equal(t, "true", v.Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Style)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}
func TestCustomStylingWithNestedMap(t *testing.T) {
s := NewSaverWithStyle(map[string]yaml.Style{
"styled": yaml.DoubleQuotedStyle,
})
var styledMap = dyn.NewValue(
map[string]dyn.Value{
"key1": dyn.NewValue("value1", dyn.Location{File: "file", Line: 1, Column: 2}),
"key2": dyn.NewValue("value2", dyn.Location{File: "file", Line: 2, Column: 2}),
},
dyn.Location{File: "file", Line: -2, Column: 2},
)
var unstyledMap = dyn.NewValue(
map[string]dyn.Value{
"key3": dyn.NewValue("value3", dyn.Location{File: "file", Line: 1, Column: 2}),
"key4": dyn.NewValue("value4", dyn.Location{File: "file", Line: 2, Column: 2}),
},
dyn.Location{File: "file", Line: -1, Column: 2},
)
var val = dyn.NewValue(
map[string]dyn.Value{
"styled": styledMap,
"unstyled": unstyledMap,
},
dyn.Location{File: "file", Line: 1, Column: 2},
)
mv, err := s.toYamlNode(val)
assert.NoError(t, err)
// Check that the styled map is quoted
v := mv.Content[1]
assert.Equal(t, yaml.MappingNode, v.Kind)
assert.Equal(t, "key1", v.Content[0].Value)
assert.Equal(t, "value1", v.Content[1].Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Content[0].Style)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Content[1].Style)
assert.Equal(t, "key2", v.Content[2].Value)
assert.Equal(t, "value2", v.Content[3].Value)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Content[2].Style)
assert.Equal(t, yaml.DoubleQuotedStyle, v.Content[3].Style)
// Check that the unstyled map is not quoted
v = mv.Content[3]
assert.Equal(t, yaml.MappingNode, v.Kind)
assert.Equal(t, "key3", v.Content[0].Value)
assert.Equal(t, "value3", v.Content[1].Value)
assert.Equal(t, yaml.Style(0), v.Content[0].Style)
assert.Equal(t, yaml.Style(0), v.Content[1].Style)
assert.Equal(t, "key4", v.Content[2].Value)
assert.Equal(t, "value4", v.Content[3].Value)
assert.Equal(t, yaml.Style(0), v.Content[2].Style)
assert.Equal(t, yaml.Style(0), v.Content[3].Style)
}
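
The nested-map behaviour asserted above can also be pictured as a post-processing pass over a `yaml.Node` tree: a style registered for a map key is inherited by that key's entire subtree, while everything else keeps the style handed down from its parent. The sketch below is illustrative only; `applyStyle` is a hypothetical helper, not part of `yamlsaver`.

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// applyStyle is a hypothetical, simplified variant of the styling rule above:
// a style registered for a map key applies to that key's whole subtree, while
// other nodes keep the style inherited from their parent.
func applyStyle(n *yaml.Node, styles map[string]yaml.Style, inherited yaml.Style) {
	n.Style = inherited
	if n.Kind != yaml.MappingNode {
		for _, c := range n.Content {
			applyStyle(c, styles, inherited)
		}
		return
	}
	// Mapping content alternates key, value, key, value, ...
	for i := 0; i+1 < len(n.Content); i += 2 {
		key, value := n.Content[i], n.Content[i+1]
		key.Style = inherited
		childStyle := inherited
		if s, ok := styles[key.Value]; ok {
			childStyle = s
		}
		applyStyle(value, styles, childStyle)
	}
}

func main() {
	var doc yaml.Node
	src := "custom_tags:\n  Tag1: 24X7-1234\nname: test-pipeline\n"
	if err := yaml.Unmarshal([]byte(src), &doc); err != nil {
		panic(err)
	}
	applyStyle(doc.Content[0], map[string]yaml.Style{"custom_tags": yaml.DoubleQuotedStyle}, 0)
	out, _ := yaml.Marshal(&doc)
	// Prints the mapping with everything under custom_tags double quoted:
	//   custom_tags:
	//       "Tag1": "24X7-1234"
	//   name: test-pipeline
	fmt.Print(string(out))
}
```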