databricks-cli/libs/template/renderer_test.go

package template

import (
	"context"
	"fmt"
	"io/fs"
	"os"
	"path"
	"path/filepath"
	"runtime"
	"strings"
	"testing"
	"text/template"

	"github.com/databricks/cli/bundle"
	bundleConfig "github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/phases"
	"github.com/databricks/cli/cmd/root"
	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/tags"
	"github.com/databricks/databricks-sdk-go"
	workspaceConfig "github.com/databricks/databricks-sdk-go/config"
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func assertFileContent(t *testing.T, path string, content string) {
	b, err := os.ReadFile(path)
	require.NoError(t, err)
	assert.Equal(t, content, string(b))
}

func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) {
	info, err := os.Stat(path)
	require.NoError(t, err)
	assert.Equal(t, perm, info.Mode().Perm())
}

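// assertBuiltinTemplateValid renders a built-in template into tempDir with the
// given settings, then loads the result as a bundle and applies the load,
// initialize, and (optionally) build phases to verify it is valid for the
// given target.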
func assertBuiltinTemplateValid(t *testing.T, template string, settings map[string]any, target string, isServicePrincipal bool, build bool, tempDir string) {
	ctx := context.Background()
	templateFS, err := fs.Sub(builtinTemplates, path.Join("templates", template))
	require.NoError(t, err)
	w := &databricks.WorkspaceClient{
		Config: &workspaceConfig.Config{Host: "https://myhost.com"},
	}
	// Prepare helpers
	cachedUser = &iam.User{UserName: "user@domain.com"}
	if isServicePrincipal {
		cachedUser.UserName = "1d410060-a513-496f-a197-23cc82e5f46d"
	}
	cachedIsServicePrincipal = &isServicePrincipal
	ctx = root.SetWorkspaceClient(ctx, w)
	helpers := loadHelpers(ctx)
	renderer, err := newRenderer(ctx, settings, helpers, templateFS, templateDirName, libraryDirName, tempDir)
	require.NoError(t, err)
	// Evaluate template
	err = renderer.walk()
	require.NoError(t, err)
	err = renderer.persistToDisk()
	require.NoError(t, err)
	b, err := bundle.Load(ctx, filepath.Join(tempDir, "my_project"))
	require.NoError(t, err)
	diags := bundle.Apply(ctx, b, phases.LoadNamedTarget(target))
	require.NoError(t, diags.Error())
	// Apply initialize / validation mutators
	bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
		b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser}
		b.Config.Bundle.Terraform = &bundleConfig.Terraform{
			ExecPath: "sh",
		}
		return nil
	})
	b.Tagging = tags.ForCloud(w.Config)
	b.WorkspaceClient()
	diags = bundle.Apply(ctx, b, bundle.Seq(
		phases.Initialize(),
	))
	require.NoError(t, diags.Error())
	// Apply build mutator
	if build {
		diags = bundle.Apply(ctx, b, phases.Build())
		require.NoError(t, diags.Error())
	}
}

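// TestBuiltinPythonTemplateValid renders the default-python template for every
// combination of the include_* options and service principal settings, then
// renders a prod configuration with the build phase enabled.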
func TestBuiltinPythonTemplateValid(t *testing.T) {
	// Test option combinations
	options := []string{"yes", "no"}
	isServicePrincipal := false
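	// Pre-populate the cached default catalog used by the template helpers.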
	catalog := "hive_metastore"
	cachedCatalog = &catalog
	build := false
	for _, includeNotebook := range options {
		for _, includeDlt := range options {
			for _, includePython := range options {
				for _, isServicePrincipal := range []bool{true, false} {
					config := map[string]any{
						"project_name":     "my_project",
						"include_notebook": includeNotebook,
						"include_dlt":      includeDlt,
						"include_python":   includePython,
					}
					tempDir := t.TempDir()
assertBuiltinTemplateValid(t, "default-python", config, "dev", isServicePrincipal, build, tempDir)
}
}
}
}
// Test prod mode + build
config := map[string]any{
"project_name": "my_project",
"include_notebook": "yes",
"include_dlt": "yes",
"include_python": "yes",
}
isServicePrincipal = false
build = true
// On Windows, we can't always remove the resulting temp dir since background
// processes might have it open, so we use 'defer' for a best-effort cleanup
tempDir, err := os.MkdirTemp("", "templates")
require.NoError(t, err)
defer os.RemoveAll(tempDir)
assertBuiltinTemplateValid(t, "default-python", config, "prod", isServicePrincipal, build, tempDir)
defer os.RemoveAll(tempDir)
}
func TestBuiltinSQLTemplateValid(t *testing.T) {
	for _, personal_schemas := range []string{"yes", "no"} {
		for _, target := range []string{"dev", "prod"} {
			for _, isServicePrincipal := range []bool{true, false} {
				config := map[string]any{
					"project_name":     "my_project",
					"http_path":        "/sql/1.0/warehouses/123abc",
					"default_catalog":  "users",
					"shared_schema":    "lennart",
					"personal_schemas": personal_schemas,
				}
				build := false
				assertBuiltinTemplateValid(t, "default-sql", config, target, isServicePrincipal, build, t.TempDir())
			}
		}
	}
}

func TestBuiltinDbtTemplateValid(t *testing.T) {
	for _, personal_schemas := range []string{"yes", "no"} {
		for _, target := range []string{"dev", "prod"} {
			for _, isServicePrincipal := range []bool{true, false} {
				config := map[string]any{
					"project_name":     "my_project",
					"http_path":        "/sql/1.0/warehouses/123",
					"default_catalog":  "hive_metastore",
					"personal_schemas": personal_schemas,
					"shared_schema":    "lennart",
				}
				build := false
				assertBuiltinTemplateValid(t, "dbt-sql", config, target, isServicePrincipal, build, t.TempDir())
			}
		}
	}
}

func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) {
	tmpDir := t.TempDir()
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/email/template", "./testdata/email/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	require.NoError(t, err)
	err = r.persistToDisk()
	require.NoError(t, err)
	b, err := os.ReadFile(filepath.Join(tmpDir, "my_email"))
	require.NoError(t, err)
	assert.Equal(t, "shreyas.goenka@databricks.com", strings.Trim(string(b), "\n\r"))
}

func TestRendererExecuteTemplate(t *testing.T) {
	templateText :=
		`"{{.count}} items are made of {{.Material}}".
{{if eq .Animal "sheep" }}
Sheep wool is the best!
{{else}}
{{.Animal}} wool is not too bad...
{{end}}
My email is {{template "email"}}
`
	r := renderer{
		config: map[string]any{
			"Material": "wool",
			"count":    1,
			"Animal":   "sheep",
		},
		baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}shreyas.goenka@databricks.com{{end}}`)),
	}
	statement, err := r.executeTemplate(templateText)
	require.NoError(t, err)
	assert.Contains(t, statement, `"1 items are made of wool"`)
	assert.NotContains(t, statement, `cat wool is not too bad.."`)
	assert.Contains(t, statement, "Sheep wool is the best!")
	assert.Contains(t, statement, `My email is shreyas.goenka@databricks.com`)
	r = renderer{
		config: map[string]any{
			"Material": "wool",
			"count":    1,
			"Animal":   "cat",
		},
		baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}hrithik.roshan@databricks.com{{end}}`)),
	}
	statement, err = r.executeTemplate(templateText)
	require.NoError(t, err)
	assert.Contains(t, statement, `"1 items are made of wool"`)
	assert.Contains(t, statement, `cat wool is not too bad...`)
	assert.NotContains(t, statement, "Sheep wool is the best!")
	assert.Contains(t, statement, `My email is hrithik.roshan@databricks.com`)
}

func TestRendererExecuteTemplateWithUnknownProperty(t *testing.T) {
	templateText := `{{.does_not_exist}}`
	r := renderer{
		config:       map[string]any{},
		baseTemplate: template.New("base"),
	}
	_, err := r.executeTemplate(templateText)
	assert.ErrorContains(t, err, "variable \"does_not_exist\" not defined")
}

func TestRendererIsSkipped(t *testing.T) {
	skipPatterns := []string{"a*", "*yz", "def", "a/b/*"}
	// skipped paths
	match, err := isSkipped("abc", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	match, err = isSkipped("abcd", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	match, err = isSkipped("a", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	match, err = isSkipped("xxyz", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	match, err = isSkipped("yz", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	match, err = isSkipped("a/b/c", skipPatterns)
	require.NoError(t, err)
	assert.True(t, match)
	// NOT skipped paths
	match, err = isSkipped(".", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
	match, err = isSkipped("y", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
	match, err = isSkipped("z", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
	match, err = isSkipped("defg", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
	match, err = isSkipped("cat", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
	match, err = isSkipped("a/b/c/d", skipPatterns)
	require.NoError(t, err)
	assert.False(t, match)
}

func TestRendererPersistToDisk(t *testing.T) {
	tmpDir := t.TempDir()
	ctx := context.Background()
	r := &renderer{
		ctx:          ctx,
		instanceRoot: tmpDir,
		skipPatterns: []string{"a/b/c", "mn*"},
		files: []file{
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "a/b/c",
				},
				perm:    0444,
				content: nil,
			},
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "mno",
				},
				perm:    0444,
				content: nil,
			},
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "a/b/d",
				},
				perm:    0444,
				content: []byte("123"),
			},
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "mmnn",
				},
				perm:    0444,
				content: []byte("456"),
			},
		},
	}
	err := r.persistToDisk()
	require.NoError(t, err)
	assert.NoFileExists(t, filepath.Join(tmpDir, "a", "b", "c"))
	assert.NoFileExists(t, filepath.Join(tmpDir, "mno"))
	assertFileContent(t, filepath.Join(tmpDir, "a", "b", "d"), "123")
	assertFilePermissions(t, filepath.Join(tmpDir, "a", "b", "d"), 0444)
	assertFileContent(t, filepath.Join(tmpDir, "mmnn"), "456")
	assertFilePermissions(t, filepath.Join(tmpDir, "mmnn"), 0444)
}

func TestRendererWalk(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/walk/template", "./testdata/walk/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
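	// getContent returns the rendered contents of the in-memory file with the
	// given relative path, failing the test if no such file was produced.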
	getContent := func(r *renderer, path string) string {
		for _, f := range r.files {
			if f.DstPath().relPath != path {
				continue
			}
			b, err := f.contents()
			require.NoError(t, err)
			return strings.Trim(string(b), "\r\n")
		}
		require.FailNow(t, "file is absent: "+path)
		return ""
	}
	assert.Len(t, r.files, 4)
	assert.Equal(t, "file one", getContent(r, "file1"))
	assert.Equal(t, "file two", getContent(r, "file2"))
	assert.Equal(t, "file three", getContent(r, "dir1/dir3/file3"))
	assert.Equal(t, "file four", getContent(r, "dir2/file4"))
}

func TestRendererFailFunction(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/fail/template", "./testdata/fail/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.Equal(t, "I am an error message", err.Error())
}

func TestRendererSkipsDirsEagerly(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/skip-dir-eagerly/template", "./testdata/skip-dir-eagerly/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	assert.Len(t, r.files, 1)
	content := string(r.files[0].(*inMemoryFile).content)
	assert.Equal(t, "I should be the only file created", strings.Trim(content, "\r\n"))
}

func TestRendererSkipAllFilesInCurrentDirectory(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/skip-all-files-in-cwd/template", "./testdata/skip-all-files-in-cwd/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	// All 3 files are executed and have in memory representations
	require.Len(t, r.files, 3)
	err = r.persistToDisk()
	require.NoError(t, err)
	entries, err := os.ReadDir(tmpDir)
	require.NoError(t, err)
	// Assert none of the files are persisted to disk, because of {{skip "*"}}
	assert.Len(t, entries, 0)
}

func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/skip-is-relative/template", "./testdata/skip-is-relative/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	assert.Len(t, r.skipPatterns, 3)
	assert.Contains(t, r.skipPatterns, "a")
	assert.Contains(t, r.skipPatterns, "dir1/b")
	assert.Contains(t, r.skipPatterns, "dir1/dir2/c")
}

func TestRendererSkip(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/skip/template", "./testdata/skip/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	// All 6 files are computed, even though "dir2/*" is present as a skip pattern
	// This is because "dir2/*" matches the files in dir2, but not dir2 itself
	assert.Len(t, r.files, 6)
	err = r.persistToDisk()
	require.NoError(t, err)
	assert.FileExists(t, filepath.Join(tmpDir, "file1"))
	assert.FileExists(t, filepath.Join(tmpDir, "file2"))
	assert.FileExists(t, filepath.Join(tmpDir, "dir1/file5"))
	// These files have been skipped
	assert.NoFileExists(t, filepath.Join(tmpDir, "file3"))
	assert.NoFileExists(t, filepath.Join(tmpDir, "dir1/file4"))
	assert.NoDirExists(t, filepath.Join(tmpDir, "dir2"))
	assert.NoFileExists(t, filepath.Join(tmpDir, "dir2/file6"))
}

func TestRendererReadsPermissionsBits(t *testing.T) {
	if runtime.GOOS != "linux" && runtime.GOOS != "darwin" {
		t.SkipNow()
	}
	tmpDir := t.TempDir()
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/executable-bit-read/template", "./testdata/executable-bit-read/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
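	// getPermissions returns the permission bits recorded for the file with the
	// given relative path, failing the test if the file is missing.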
	getPermissions := func(r *renderer, path string) fs.FileMode {
		for _, f := range r.files {
			if f.DstPath().relPath != path {
				continue
			}
			switch v := f.(type) {
			case *inMemoryFile:
				return v.perm
			case *copyFile:
				return v.perm
			default:
				require.FailNow(t, "execution should not reach here")
			}
		}
		require.FailNow(t, "file is absent: "+path)
		return 0
	}
	assert.Len(t, r.files, 2)
	assert.Equal(t, getPermissions(r, "script.sh"), fs.FileMode(0755))
	assert.Equal(t, getPermissions(r, "not-a-script"), fs.FileMode(0644))
}

func TestRendererErrorOnConflictingFile(t *testing.T) {
	tmpDir := t.TempDir()
	f, err := os.Create(filepath.Join(tmpDir, "a"))
	require.NoError(t, err)
	err = f.Close()
	require.NoError(t, err)
	r := renderer{
		skipPatterns: []string{},
		files: []file{
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "a",
				},
				perm:    0444,
				content: []byte("123"),
			},
		},
	}
	err = r.persistToDisk()
	assert.EqualError(t, err, fmt.Sprintf("failed to initialize template, one or more files already exist: %s", filepath.Join(tmpDir, "a")))
}

func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) {
	tmpDir := t.TempDir()
	ctx := context.Background()
	f, err := os.Create(filepath.Join(tmpDir, "a"))
	require.NoError(t, err)
	err = f.Close()
	require.NoError(t, err)
	r := renderer{
		ctx:          ctx,
		skipPatterns: []string{"a"},
		files: []file{
			&inMemoryFile{
				dstPath: &destinationPath{
					root:    tmpDir,
					relPath: "a",
				},
				perm:    0444,
				content: []byte("123"),
			},
		},
	}
	err = r.persistToDisk()
	// No error is returned even though a conflicting file exists. This is because
	// the generated file is being skipped
	assert.NoError(t, err)
	assert.Len(t, r.files, 1)
}

func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, nil, helpers, os.DirFS("."), "./testdata/copy-file-walk/template", "./testdata/copy-file-walk/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	assert.Len(t, r.files, 1)
	assert.Equal(t, r.files[0].(*copyFile).srcPath, "not-a-template")
	assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "not-a-template"))
}

func TestRendererFileTreeRendering(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	tmpDir := t.TempDir()
	helpers := loadHelpers(ctx)
	r, err := newRenderer(ctx, map[string]any{
		"dir_name":  "my_directory",
		"file_name": "my_file",
	}, helpers, os.DirFS("."), "./testdata/file-tree-rendering/template", "./testdata/file-tree-rendering/library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	assert.NoError(t, err)
	// Assert in memory representation is created.
	assert.Len(t, r.files, 1)
	assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "my_directory", "my_file"))
	err = r.persistToDisk()
	require.NoError(t, err)
	// Assert files and directories are correctly materialized.
	assert.DirExists(t, filepath.Join(tmpDir, "my_directory"))
	assert.FileExists(t, filepath.Join(tmpDir, "my_directory", "my_file"))
}

func TestRendererSubTemplateInPath(t *testing.T) {
	ctx := context.Background()
	ctx = root.SetWorkspaceClient(ctx, nil)
	// Copy the template directory to a temporary directory where we can safely include a templated file path.
	// These paths include characters that are forbidden in Go modules, so we can't use the testdata directory.
	// Also see https://github.com/databricks/cli/pull/1671.
	templateDir := t.TempDir()
	testutil.CopyDirectory(t, "./testdata/template-in-path", templateDir)
	// Use a backtick-quoted string; double quotes are a reserved character for Windows paths:
	// https://learn.microsoft.com/en-us/windows/win32/fileio/naming-a-file.
	testutil.Touch(t, filepath.Join(templateDir, "template/{{template `dir_name`}}/{{template `file_name`}}"))
	tmpDir := t.TempDir()
	r, err := newRenderer(ctx, nil, nil, os.DirFS(templateDir), "template", "library", tmpDir)
	require.NoError(t, err)
	err = r.walk()
	require.NoError(t, err)
	if assert.Len(t, r.files, 2) {
		f := r.files[1]
		assert.Equal(t, filepath.Join(tmpDir, "my_directory", "my_file"), f.DstPath().absPath())
		assert.Equal(t, "my_directory/my_file", f.DstPath().relPath)
	}
}