Compare commits


No commits in common. "2f7fa9760136eb197ed2de397898dbe81a8cbd52" and "d9ed8ebeaea6cb0e29c27ee9eb2b4070f7156daf" have entirely different histories.

30 changed files with 97 additions and 409 deletions

View File

@@ -1,28 +1,5 @@
# Version changelog
## [Release] Release v0.227.1
CLI:
* Disable prompt for storage-credentials get command ([#1723](https://github.com/databricks/cli/pull/1723)).
Bundles:
* Do not treat empty path as a local path ([#1717](https://github.com/databricks/cli/pull/1717)).
* Correctly mark PyPI package name specs with multiple specifiers as remote libraries ([#1725](https://github.com/databricks/cli/pull/1725)).
* Improve error handling for /Volumes paths in mode: development ([#1716](https://github.com/databricks/cli/pull/1716)).
Internal:
* Ignore CLI version check on development builds of the CLI ([#1714](https://github.com/databricks/cli/pull/1714)).
API Changes:
* Added `databricks resource-quotas` command group.
* Added `databricks policy-compliance-for-clusters` command group.
* Added `databricks policy-compliance-for-jobs` command group.
OpenAPI commit 3eae49b444cac5a0118a3503e5b7ecef7f96527a (2024-08-21)
Dependency updates:
* Bump github.com/databricks/databricks-sdk-go from 0.44.0 to 0.45.0 ([#1719](https://github.com/databricks/cli/pull/1719)).
* Revert hc-install version to 0.7.0 ([#1711](https://github.com/databricks/cli/pull/1711)).
## [Release] Release v0.227.0
CLI:

View File

@@ -6,5 +6,5 @@ type Deployment struct {
FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`
// Lock configures locking behavior on deployment.
Lock Lock `json:"lock,omitempty"`
Lock Lock `json:"lock"`
}

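A note on the tag change above: for a non-pointer struct field like `Lock`, `encoding/json` ignores `omitempty` when marshaling, because a struct value is never considered empty; the tag's presence mainly matters to tooling that inspects struct tags, such as schema generation. A minimal sketch (the `Enabled` field is hypothetical, for illustration only):

```go
package main

import (
	"encoding/json"
	"fmt"
)

type Lock struct {
	Enabled bool `json:"enabled,omitempty"`
}

type Deployment struct {
	// omitempty has no effect here: encoding/json never treats a
	// non-pointer struct value as "empty".
	WithTag    Lock `json:"with_tag,omitempty"`
	WithoutTag Lock `json:"without_tag"`
}

func main() {
	out, _ := json.Marshal(Deployment{})
	fmt.Println(string(out)) // {"with_tag":{},"without_tag":{}}
}
```
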
View File

@@ -64,7 +64,6 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) {
}
func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
p := b.Config.Presets
u := b.Config.Workspace.CurrentUser
@@ -75,56 +74,44 @@ func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
// status to UNPAUSED at the level of an individual object, which was
// historically allowed.)
if p.TriggerPauseStatus == config.Unpaused {
diags = diags.Append(diag.Diagnostic{
return diag.Diagnostics{{
Severity: diag.Error,
Summary: "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default",
Locations: []dyn.Location{b.Config.GetLocation("presets.trigger_pause_status")},
})
}}
}
// Make sure this development copy has unique names and paths to avoid conflicts
if path := findNonUserPath(b); path != "" {
if path == "artifact_path" && strings.HasPrefix(b.Config.Workspace.ArtifactPath, "/Volumes") {
// For Volumes paths we recommend including the current username as a substring
diags = diags.Extend(diag.Errorf("%s should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'", path))
} else {
// For non-Volumes paths we recommend simply putting things in the home folder
diags = diags.Extend(diag.Errorf("%s must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'", path))
}
return diag.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path)
}
if p.NamePrefix != "" && !strings.Contains(p.NamePrefix, u.ShortName) && !strings.Contains(p.NamePrefix, u.UserName) {
// Resources such as pipelines require a unique name, e.g. '[dev steve] my_pipeline'.
// For this reason we require the name prefix to contain the current username;
// it's a pitfall for users if they don't include it and later find out that
// only a single user can do development deployments.
diags = diags.Append(diag.Diagnostic{
return diag.Diagnostics{{
Severity: diag.Error,
Summary: "prefix should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'",
Locations: []dyn.Location{b.Config.GetLocation("presets.name_prefix")},
})
}}
}
return diags
return nil
}
// findNonUserPath finds the first workspace path such as root_path that doesn't
// contain the current username or current user's shortname.
func findNonUserPath(b *bundle.Bundle) string {
containsName := func(path string) bool {
username := b.Config.Workspace.CurrentUser.UserName
shortname := b.Config.Workspace.CurrentUser.ShortName
return strings.Contains(path, username) || strings.Contains(path, shortname)
}
username := b.Config.Workspace.CurrentUser.UserName
if b.Config.Workspace.RootPath != "" && !containsName(b.Config.Workspace.RootPath) {
if b.Config.Workspace.RootPath != "" && !strings.Contains(b.Config.Workspace.RootPath, username) {
return "root_path"
}
if b.Config.Workspace.StatePath != "" && !containsName(b.Config.Workspace.StatePath) {
if b.Config.Workspace.StatePath != "" && !strings.Contains(b.Config.Workspace.StatePath, username) {
return "state_path"
}
if b.Config.Workspace.FilePath != "" && !containsName(b.Config.Workspace.FilePath) {
if b.Config.Workspace.FilePath != "" && !strings.Contains(b.Config.Workspace.FilePath, username) {
return "file_path"
}
if b.Config.Workspace.ArtifactPath != "" && !containsName(b.Config.Workspace.ArtifactPath) {
if b.Config.Workspace.ArtifactPath != "" && !strings.Contains(b.Config.Workspace.ArtifactPath, username) {
return "artifact_path"
}
return ""

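To see why the two checks above differ: a workspace path frequently embeds only the user's short name, which a username-only `strings.Contains` check will not match. A small sketch with made-up values:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	username := "jane.doe@example.com"
	shortname := "jane.doe" // short name derived from the username
	path := "/Users/jane.doe/.bundle/state"

	fmt.Println(strings.Contains(path, username))  // false: full username not in path
	fmt.Println(strings.Contains(path, shortname)) // true: short name is
}
```
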
View File

@@ -230,20 +230,10 @@ func TestValidateDevelopmentMode(t *testing.T) {
diags := validateDevelopmentMode(b)
require.NoError(t, diags.Error())
// Test with /Volumes path
b = mockBundle(config.Development)
b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/lennart/libs"
diags = validateDevelopmentMode(b)
require.NoError(t, diags.Error())
b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/libs"
diags = validateDevelopmentMode(b)
require.ErrorContains(t, diags.Error(), "artifact_path should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'")
// Test with a bundle that has a non-user path
b = mockBundle(config.Development)
b.Config.Workspace.RootPath = "/Shared/.bundle/x/y/state"
diags = validateDevelopmentMode(b)
require.ErrorContains(t, diags.Error(), "root_path must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'")
require.ErrorContains(t, diags.Error(), "root_path")
// Test with a bundle that has an unpaused trigger pause status
b = mockBundle(config.Development)

View File

@@ -1,21 +1,15 @@
package python
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"github.com/databricks/databricks-sdk-go/logger"
"github.com/fatih/color"
"strings"
"github.com/databricks/cli/libs/python"
"github.com/databricks/databricks-sdk-go/logger"
"github.com/databricks/cli/bundle/env"
@@ -175,11 +169,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
}
stderrBuf := bytes.Buffer{}
stderrWriter := io.MultiWriter(
newLogWriter(ctx, "stderr: "),
&stderrBuf,
)
stderrWriter := newLogWriter(ctx, "stderr: ")
stdoutWriter := newLogWriter(ctx, "stdout: ")
_, processErr := process.Background(
@@ -207,13 +197,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
// the process can fail without reporting errors in the diagnostics file or creating it, for instance,
// when the venv doesn't have the PyDABs library installed
if processErr != nil {
diagnostic := diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("python mutator process failed: %q, use --debug to enable logging", processErr),
Detail: explainProcessErr(stderrBuf.String()),
}
return dyn.InvalidValue, diag.Diagnostics{diagnostic}
return dyn.InvalidValue, diag.Errorf("python mutator process failed: %sw, use --debug to enable logging", processErr)
}
// or we can fail to read the diagnostics file, which should always be created
@@ -221,40 +205,15 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
}
output, outputDiags := loadOutputFile(rootPath, outputPath)
pythonDiagnostics = pythonDiagnostics.Extend(outputDiags)
output, err := loadOutputFile(rootPath, outputPath)
if err != nil {
return dyn.InvalidValue, diag.Errorf("failed to load Python mutator output: %s", err)
}
// we pass through pythonDiagnostics because it contains warnings
return output, pythonDiagnostics
}
const installExplanation = `If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
and that the wheel is installed in the Python environment:
$ .venv/bin/pip install -e .
If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
or activate the environment before running CLI commands:
experimental:
pydabs:
venv_path: .venv
`
// explainProcessErr provides additional explanation for common errors.
// It's meant to be best effort, and not all errors are covered.
// Output should only be used for error reporting.
func explainProcessErr(stderr string) string {
// implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
summary := color.CyanString("Explanation: ") + "'databricks-pydabs' library is not installed in the Python environment.\n"
return stderr + "\n" + summary + "\n" + installExplanation
}
return stderr
}
func writeInputFile(inputPath string, input dyn.Value) error {
// we need to marshal dyn.Value instead of bundle.Config to JSON to support
// non-string fields assigned with bundle variables
@@ -266,10 +225,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
return os.WriteFile(inputPath, rootConfigJson, 0600)
}
func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
outputFile, err := os.Open(outputPath)
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
return dyn.InvalidValue, fmt.Errorf("failed to open output file: %w", err)
}
defer outputFile.Close()
@@ -284,34 +243,27 @@ func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnos
// for that, we pass virtualPath instead of outputPath as file location
virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
return dyn.InvalidValue, fmt.Errorf("failed to get absolute path: %w", err)
}
generated, err := yamlloader.LoadYAML(virtualPath, outputFile)
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse output file: %w", err))
return dyn.InvalidValue, fmt.Errorf("failed to parse output file: %w", err)
}
return strictNormalize(config.Root{}, generated)
}
func strictNormalize(dst any, generated dyn.Value) (dyn.Value, diag.Diagnostics) {
normalized, diags := convert.Normalize(dst, generated)
normalized, diagnostic := convert.Normalize(config.Root{}, generated)
if diagnostic.Error() != nil {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %w", diagnostic.Error())
}
// warnings shouldn't happen because the output should already be normalized
// if one happens, it's a bug in the mutator and should be treated as an error
strictDiags := diag.Diagnostics{}
for _, d := range diags {
if d.Severity == diag.Warning {
d.Severity = diag.Error
}
strictDiags = strictDiags.Append(d)
for _, d := range diagnostic.Filter(diag.Warning) {
return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %s", d.Summary)
}
return normalized, strictDiags
return normalized, nil
}
// loadDiagnosticsFile loads diagnostics from a file.

View File

@@ -10,8 +10,6 @@ import (
"runtime"
"testing"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/merge"
"github.com/databricks/cli/bundle/env"
@@ -257,7 +255,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
mutator := PythonMutator(PythonMutatorPhaseLoad)
diag := bundle.Apply(ctx, b, mutator)
assert.EqualError(t, diag.Error(), "unknown field: unknown_property")
assert.EqualError(t, diag.Error(), "failed to load Python mutator output: failed to normalize output: unknown field: unknown_property")
}
func TestPythonMutator_disabled(t *testing.T) {
@@ -548,46 +546,6 @@ func TestInterpreterPath(t *testing.T) {
}
}
func TestStrictNormalize(t *testing.T) {
// NB: there is no way to trigger diag.Error, so we don't test it
type TestStruct struct {
A int `json:"a"`
}
value := dyn.NewValue(map[string]dyn.Value{"A": dyn.NewValue("abc", nil)}, nil)
_, diags := convert.Normalize(TestStruct{}, value)
_, strictDiags := strictNormalize(TestStruct{}, value)
assert.False(t, diags.HasError())
assert.True(t, strictDiags.HasError())
}
func TestExplainProcessErr(t *testing.T) {
stderr := "/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')\n"
expected := `/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')
Explanation: 'databricks-pydabs' library is not installed in the Python environment.
If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
and that the wheel is installed in the Python environment:
$ .venv/bin/pip install -e .
If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
or activate the environment before running CLI commands:
experimental:
pydabs:
venv_path: .venv
`
out := explainProcessErr(stderr)
assert.Equal(t, expected, out)
}
func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
ctx := context.Background()
ctx, stub := process.WithStub(ctx)

View File

@@ -10,6 +10,7 @@ import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/dyn/dynvar"
"github.com/databricks/cli/libs/log"
)
type resolveVariableReferences struct {
@@ -123,7 +124,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
// We rewrite it here to make the resolution logic simpler.
varPath := dyn.NewPath(dyn.Key("var"))
var diags diag.Diagnostics
err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
// Synthesize a copy of the root that has all fields that are present in the type
// but not set in the dynamic value set to their corresponding empty value.
@@ -180,13 +180,14 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
// Normalize the result because variable resolution may have been applied to non-string fields.
// For example, a variable reference may have been resolved to an integer.
root, normaliseDiags := convert.Normalize(b.Config, root)
diags = diags.Extend(normaliseDiags)
root, diags := convert.Normalize(b.Config, root)
for _, diag := range diags {
// This occurs when a variable's resolved value is incompatible with the field's type.
// Log a warning until we have a better way to surface these diagnostics to the user.
log.Warnf(ctx, "normalization diagnostic: %s", diag.Summary)
}
return root, nil
})
if err != nil {
diags = diags.Extend(diag.FromErr(err))
}
return diags
return diag.FromErr(err)
}

View File

@@ -111,13 +111,6 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error {
environ["PATH"] = path
}
// Include $AZURE_CONFIG_FILE in set of environment variables to pass along.
// This is set in Azure DevOps by the AzureCLI@2 task.
azureConfigFile, ok := env.Lookup(ctx, "AZURE_CONFIG_FILE")
if ok {
environ["AZURE_CONFIG_FILE"] = azureConfigFile
}
// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
// See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration
devConfigFile, ok := env.Lookup(ctx, "TF_CLI_CONFIG_FILE")

View File

@@ -269,20 +269,19 @@ func TestSetUserAgentExtraEnvVar(t *testing.T) {
}
func TestInheritEnvVars(t *testing.T) {
env := map[string]string{}
t.Setenv("HOME", "/home/testuser")
t.Setenv("PATH", "/foo:/bar")
t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
t.Setenv("AZURE_CONFIG_FILE", "/tmp/foo/bar")
ctx := context.Background()
env := map[string]string{}
err := inheritEnvVars(ctx, env)
if assert.NoError(t, err) {
assert.Equal(t, "/home/testuser", env["HOME"])
assert.Equal(t, "/foo:/bar", env["PATH"])
assert.Equal(t, "/tmp/config.tfrc", env["TF_CLI_CONFIG_FILE"])
assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_FILE"])
}
err := inheritEnvVars(context.Background(), env)
require.NoError(t, err)
require.Equal(t, env["HOME"], "/home/testuser")
require.Equal(t, env["PATH"], "/foo:/bar")
require.Equal(t, env["TF_CLI_CONFIG_FILE"], "/tmp/config.tfrc")
}
func TestSetUserProfileFromInheritEnvVars(t *testing.T) {

View File

@@ -72,11 +72,9 @@ func IsLibraryLocal(dep string) bool {
// ^[a-zA-Z0-9\-_]+: Matches the package name, allowing alphanumeric characters, dashes (-), and underscores (_).
// \[.*\])?: Optionally matches any extras specified in square brackets, e.g., [security].
// ((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?): Optionally matches version specifiers, supporting various operators (==, !=, etc.) followed by a version number (e.g., 2.25.1).
// ,?: Optionally matches a comma (,) at the end of the specifier which is used to separate multiple specifiers.
// There can be multiple version specifiers separated by commas or no specifiers.
// ((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?)?: Optionally matches version specifiers, supporting various operators (==, !=, etc.) followed by a version number (e.g., 2.25.1).
// Spec for package name and version specifier: https://pip.pypa.io/en/stable/reference/requirement-specifiers/
var packageRegex = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?,?)*$`)
var packageRegex = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?)?$`)
func isPackage(name string) bool {
if packageRegex.MatchString(name) {

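The practical difference between the two patterns above shows up with comma-separated version specifiers, which is exactly what the test cases removed in the next file exercise. A self-contained sketch using both regexes verbatim:

```go
package main

import (
	"fmt"
	"regexp"
)

// multiSpec allows multiple comma-separated version specifiers.
var multiSpec = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?,?)*$`)

// singleSpec allows at most one version specifier.
var singleSpec = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?)?$`)

func main() {
	spec := "beautifulsoup4>=1.0.0,<2.0.0"
	fmt.Println(multiSpec.MatchString(spec))  // true: comma-separated specifiers accepted
	fmt.Println(singleSpec.MatchString(spec)) // false: only a single specifier matches
}
```
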
View File

@@ -62,8 +62,6 @@ func TestIsLibraryLocal(t *testing.T) {
{path: "beautifulsoup4 ~= 4.12.3", expected: false},
{path: "beautifulsoup4[security, tests]", expected: false},
{path: "beautifulsoup4[security, tests] ~= 4.12.3", expected: false},
{path: "beautifulsoup4>=1.0.0,<2.0.0", expected: false},
{path: "beautifulsoup4>=1.0.0,~=1.2.0,<2.0.0", expected: false},
{path: "https://github.com/pypa/pip/archive/22.0.2.zip", expected: false},
{path: "pip @ https://github.com/pypa/pip/archive/22.0.2.zip", expected: false},
{path: "requests [security] @ https://github.com/psf/requests/archive/refs/heads/main.zip", expected: false},

View File

@@ -2,7 +2,6 @@ package python
import (
"context"
"strconv"
"strings"
"github.com/databricks/cli/bundle"
@@ -39,7 +38,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
tasks := libraries.FindTasksWithLocalLibraries(b)
for _, task := range tasks {
if task.NewCluster != nil {
if lowerThanExpectedVersion(task.NewCluster.SparkVersion) {
if lowerThanExpectedVersion(ctx, task.NewCluster.SparkVersion) {
return true
}
}
@@ -48,7 +47,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
for _, job := range b.Config.Resources.Jobs {
for _, cluster := range job.JobClusters {
if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" {
if lowerThanExpectedVersion(cluster.NewCluster.SparkVersion) {
if lowerThanExpectedVersion(ctx, cluster.NewCluster.SparkVersion) {
return true
}
}
@@ -65,7 +64,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false
}
if lowerThanExpectedVersion(version) {
if lowerThanExpectedVersion(ctx, version) {
return true
}
}
@@ -74,7 +73,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false
}
func lowerThanExpectedVersion(sparkVersion string) bool {
func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool {
parts := strings.Split(sparkVersion, ".")
if len(parts) < 2 {
return false
@@ -83,17 +82,6 @@ func lowerThanExpectedVersion(sparkVersion string) bool {
if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99)
parts[1] = "99"
}
// if any of the version parts are not numbers, we can't compare
// so consider it a compatible version
if _, err := strconv.Atoi(parts[0]); err != nil {
return false
}
if _, err := strconv.Atoi(parts[1]); err != nil {
return false
}
v := "v" + parts[0] + "." + parts[1]
return semver.Compare(v, "v13.1") < 0
}

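The version gate above reduces to normalizing a wildcard minor and delegating to semver. A minimal sketch, assuming `golang.org/x/mod/semver` is the semver package in use and omitting the context parameter, which the comparison itself does not consult:

```go
package main

import (
	"fmt"
	"strings"

	"golang.org/x/mod/semver"
)

func lowerThan131(sparkVersion string) bool {
	parts := strings.Split(sparkVersion, ".")
	if len(parts) < 2 {
		return false
	}
	if parts[1][0] == 'x' { // treat "13.x" as the very latest minor (13.99)
		parts[1] = "99"
	}
	return semver.Compare("v"+parts[0]+"."+parts[1], "v13.1") < 0
}

func main() {
	fmt.Println(lowerThan131("10.4.x-scala2.12"))   // true: v10.4 < v13.1
	fmt.Println(lowerThan131("13.x-rc-scala-2.12")) // false: v13.99 >= v13.1
}
```

Note that without the removed `strconv` guard, a non-numeric version such as "client.1.10-scala2.12" yields an invalid semver string, which `semver.Compare` orders before every valid version, so it now reports true; that is why the corresponding test cases disappear in the next file.
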
View File

@@ -344,8 +344,6 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
"14.1.x-scala2.12": false,
"13.x-snapshot-scala-2.12": false,
"13.x-rc-scala-2.12": false,
"client.1.10-scala2.12": false,
"latest-stable-gpu-scala2.11": false,
"10.4.x-aarch64-photon-scala2.12": true,
"10.4.x-scala2.12": true,
"13.0.x-scala2.12": true,
@ -353,7 +351,7 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
}
for k, v := range testCases {
result := lowerThanExpectedVersion(k)
result := lowerThanExpectedVersion(context.Background(), k)
require.Equal(t, v, result, k)
}
}

View File

@@ -127,55 +127,6 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
w := wt.W
nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)
// deploy pipeline
err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
// assert pipeline is created
pipelineName := "test-bundle-pipeline-" + uniqueId
pipeline, err := w.Pipelines.GetByName(ctx, pipelineName)
require.NoError(t, err)
assert.Equal(t, pipeline.Name, pipelineName)
// assert job is created
jobName := "test-bundle-job-" + uniqueId
job, err := w.Jobs.GetBySettingsName(ctx, jobName)
require.NoError(t, err)
assert.Equal(t, job.Settings.Name, jobName)
// delete resources.yml
err = os.Remove(filepath.Join(bundleRoot, "resources.yml"))
require.NoError(t, err)
// Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve.
t.Setenv("BUNDLE_ROOT", bundleRoot)
t.Setenv("TERM", "dumb")
c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
stdout, stderr, err := c.Run()
assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
properties such as the 'catalog' or 'storage' are changed:
delete pipeline bar`)
assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
}
func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
ctx, wt := acc.UcWorkspaceTest(t)
w := wt.W
@@ -206,6 +157,9 @@ func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
stdout, stderr, err := c.Run()
assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
fmt.Println("expected: ", "This action will result in the deletion or recreation of the following DLT Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline properties such as the `catalog` or `storage` are changed::\n recreate pipeline foo")
fmt.Println("stderr: ", stderr.String())
assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline

View File

@@ -267,8 +267,6 @@ func (n normalizeOptions) normalizeString(typ reflect.Type, src dyn.Value, path
out = strconv.FormatInt(src.MustInt(), 10)
case dyn.KindFloat:
out = strconv.FormatFloat(src.MustFloat(), 'f', -1, 64)
case dyn.KindTime:
out = src.MustTime().String()
case dyn.KindNil:
// Return a warning if the field is present but has a null value.
return dyn.InvalidValue, diags.Append(nullWarning(dyn.KindString, src, path))

View File

@@ -569,14 +569,6 @@ func TestNormalizeStringFromFloat(t *testing.T) {
assert.Equal(t, dyn.NewValue("1.2", vin.Locations()), vout)
}
func TestNormalizeStringFromTime(t *testing.T) {
var typ string
vin := dyn.NewValue(dyn.MustTime("2024-08-29"), []dyn.Location{{File: "file", Line: 1, Column: 1}})
vout, err := Normalize(&typ, vin)
assert.Empty(t, err)
assert.Equal(t, dyn.NewValue("2024-08-29", vin.Locations()), vout)
}
func TestNormalizeStringError(t *testing.T) {
var typ string
vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")})

View File

@@ -2,6 +2,7 @@ package dyn
import (
"fmt"
"time"
)
type Kind int
@@ -33,7 +34,7 @@ func kindOf(v any) Kind {
return KindInt
case float32, float64:
return KindFloat
case Time:
case time.Time:
return KindTime
case nil:
return KindNil

View File

@@ -83,16 +83,16 @@ func TestOverride_Primitive(t *testing.T) {
{
name: "time (updated)",
state: visitorState{updated: []string{"root"}},
left: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
right: dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
left: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
right: dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
expected: dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
},
{
name: "time (not updated)",
state: visitorState{},
left: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
right: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{rightLocation}),
expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
left: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
right: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{rightLocation}),
expected: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
},
{
name: "different types (updated)",

View File

@@ -1,62 +0,0 @@
package dyn
import (
"fmt"
"time"
)
// Time represents a time-like primitive value.
//
// It represents a timestamp and includes the original string value
// that was parsed to create the timestamp. This makes it possible
// to coalesce a value that YAML interprets as a timestamp back into
// a string without losing information.
type Time struct {
t time.Time
s string
}
// NewTime creates a new Time from the given string.
func NewTime(str string) (Time, error) {
// Try a couple of layouts
for _, layout := range []string{
"2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
"2006-1-2 15:4:5.999999999", // space separated with no time zone
"2006-1-2", // date only
} {
t, terr := time.Parse(layout, str)
if terr == nil {
return Time{t: t, s: str}, nil
}
}
return Time{}, fmt.Errorf("invalid time value: %q", str)
}
// MustTime creates a new Time from the given string.
// It panics if the string cannot be parsed.
func MustTime(str string) Time {
t, err := NewTime(str)
if err != nil {
panic(err)
}
return t
}
// FromTime creates a new Time from the given time.Time.
// It uses the RFC3339Nano format for its string representation.
// This guarantees that it can roundtrip into a string without losing information.
func FromTime(t time.Time) Time {
return Time{t: t, s: t.Format(time.RFC3339Nano)}
}
// Time returns the time.Time value.
func (t Time) Time() time.Time {
return t.t
}
// String returns the original string value that was parsed to create the timestamp.
func (t Time) String() string {
return t.s
}

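The file deleted above is the wrapper whose removal drives the surrounding changes: once a YAML timestamp is stored as a bare `time.Time`, the original spelling can no longer be recovered. A small sketch of that information loss:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// A YAML loader recognizes "2024-08-29" as a timestamp.
	t, _ := time.Parse("2006-1-2", "2024-08-29")

	// Rendering the parsed time.Time no longer matches the input:
	fmt.Println(t.Format(time.RFC3339Nano)) // 2024-08-29T00:00:00Z
	fmt.Println(t.UTC().String())           // 2024-08-29 00:00:00 +0000 UTC

	// The removed dyn.Time kept the original string alongside the
	// parsed value, so saving could emit "2024-08-29" unchanged.
}
```
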
View File

@@ -1,41 +0,0 @@
package dyn_test
import (
"testing"
"time"
"github.com/databricks/cli/libs/dyn"
assert "github.com/databricks/cli/libs/dyn/dynassert"
)
func TestTimeValid(t *testing.T) {
for _, tc := range []string{
"2024-08-29",
"2024-01-15T12:34:56.789012345Z",
} {
tm, err := dyn.NewTime(tc)
if assert.NoError(t, err) {
assert.NotEqual(t, time.Time{}, tm.Time())
assert.Equal(t, tc, tm.String())
}
}
}
func TestTimeInvalid(t *testing.T) {
tm, err := dyn.NewTime("invalid")
assert.Error(t, err)
assert.Equal(t, dyn.Time{}, tm)
}
func TestTimeFromTime(t *testing.T) {
tref := time.Now()
t1 := dyn.FromTime(tref)
// Verify that the underlying value is the same.
assert.Equal(t, tref, t1.Time())
// Verify that the string representation can be used to construct the same.
t2, err := dyn.NewTime(t1.String())
assert.NoError(t, err)
assert.True(t, t1.Time().Equal(t2.Time()))
}

View File

@@ -127,8 +127,7 @@ func (v Value) AsAny() any {
case KindFloat:
return v.v
case KindTime:
t := v.v.(Time)
return t.Time()
return v.v
default:
// Panic because we only want to deal with known types.
panic(fmt.Sprintf("invalid kind: %d", v.k))

View File

@@ -2,6 +2,7 @@ package dyn
import (
"fmt"
"time"
)
// AsMap returns the underlying mapping if this value is a map,
@@ -122,14 +123,14 @@ func (v Value) MustFloat() float64 {
// AsTime returns the underlying time if this value is a time,
// the zero value and false otherwise.
func (v Value) AsTime() (Time, bool) {
vv, ok := v.v.(Time)
func (v Value) AsTime() (time.Time, bool) {
vv, ok := v.v.(time.Time)
return vv, ok
}
// MustTime returns the underlying time if this value is a time,
// panics otherwise.
func (v Value) MustTime() Time {
func (v Value) MustTime() time.Time {
vv, ok := v.AsTime()
if !ok || v.k != KindTime {
panic(fmt.Sprintf("expected kind %s, got %s", KindTime, v.k))

View File

@@ -143,7 +143,7 @@ func TestValueUnderlyingFloat(t *testing.T) {
}
func TestValueUnderlyingTime(t *testing.T) {
v := dyn.V(dyn.FromTime(time.Now()))
v := dyn.V(time.Now())
vv1, ok := v.AsTime()
assert.True(t, ok)

View File

@@ -5,6 +5,7 @@ import (
"math"
"strconv"
"strings"
"time"
"github.com/databricks/cli/libs/dyn"
"gopkg.in/yaml.v3"
@@ -206,9 +207,17 @@ func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error
case "!!null":
return dyn.NewValue(nil, []dyn.Location{loc}), nil
case "!!timestamp":
t, err := dyn.NewTime(node.Value)
if err == nil {
return dyn.NewValue(t, []dyn.Location{loc}), nil
// Try a couple of layouts
for _, layout := range []string{
"2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
"2006-1-2 15:4:5.999999999", // space separated with no time zone
"2006-1-2", // date only
} {
t, terr := time.Parse(layout, node.Value)
if terr == nil {
return dyn.NewValue(t, []dyn.Location{loc}), nil
}
}
return dyn.InvalidValue, errorf(loc, "invalid timestamp value: %v", node.Value)
default:

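For reference, the layout list above accepts the same spellings the removed `dyn.NewTime` handled. A runnable sketch of the fallback loop with sample inputs; `parseTimestamp` is a hypothetical standalone wrapper, not part of the loader:

```go
package main

import (
	"fmt"
	"time"
)

var layouts = []string{
	"2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields
	"2006-1-2t15:4:5.999999999Z07:00", // same, with a lower-case "t"
	"2006-1-2 15:4:5.999999999",       // space separated, no time zone
	"2006-1-2",                        // date only
}

func parseTimestamp(s string) (time.Time, error) {
	for _, layout := range layouts {
		if t, err := time.Parse(layout, s); err == nil {
			return t, nil
		}
	}
	return time.Time{}, fmt.Errorf("invalid timestamp value: %q", s)
}

func main() {
	for _, s := range []string{
		"2024-08-29",                     // date only
		"2024-01-15T12:34:56.789012345Z", // RFC3339Nano
		"2024-01-15 12:34:56",            // space separated
		"not-a-timestamp",                // rejected
	} {
		t, err := parseTimestamp(s)
		fmt.Println(s, "->", t, err)
	}
}
```
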
View File

@@ -129,7 +129,7 @@ func (s *saver) toYamlNodeWithStyle(v dyn.Value, style yaml.Style) (*yaml.Node,
case dyn.KindFloat:
return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustFloat()), Style: style}, nil
case dyn.KindTime:
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().String(), Style: style}, nil
return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().UTC().String(), Style: style}, nil
default:
// Panic because we only want to deal with known types.
panic(fmt.Sprintf("invalid kind: %d", v.Kind()))

View File

@@ -2,10 +2,10 @@ package yamlsaver
import (
"testing"
"time"
"github.com/databricks/cli/libs/dyn"
assert "github.com/databricks/cli/libs/dyn/dynassert"
"github.com/stretchr/testify/require"
"gopkg.in/yaml.v3"
)
@@ -45,14 +45,11 @@ func TestMarshalBoolValue(t *testing.T) {
}
func TestMarshalTimeValue(t *testing.T) {
tm, err := dyn.NewTime("1970-01-01")
require.NoError(t, err)
s := NewSaver()
var timeValue = dyn.V(tm)
var timeValue = dyn.V(time.Unix(0, 0))
v, err := s.toYamlNode(timeValue)
assert.NoError(t, err)
assert.Equal(t, "1970-01-01", v.Value)
assert.Equal(t, "1970-01-01 00:00:00 +0000 UTC", v.Value)
assert.Equal(t, yaml.ScalarNode, v.Kind)
}

View File

@@ -1,7 +1,7 @@
{{define "latest_lts_dbr_version" -}}
15.4.x-scala2.12
13.3.x-scala2.12
{{- end}}
{{define "latest_lts_db_connect_version_spec" -}}
>=15.4,<15.5
>=13.3,<13.4
{{- end}}

View File

@@ -1,7 +1,7 @@
{{define "latest_lts_dbr_version" -}}
15.4.x-scala2.12
13.3.x-scala2.12
{{- end}}
{{define "latest_lts_db_connect_version_spec" -}}
>=15.4,<15.5
>=13.3,<13.4
{{- end}}

View File

@@ -1,7 +1,7 @@
{{define "latest_lts_dbr_version" -}}
15.4.x-scala2.12
13.3.x-scala2.12
{{- end}}
{{define "latest_lts_db_connect_version_spec" -}}
>=15.4,<15.5
>=13.3,<13.4
{{- end}}

View File

@@ -1,9 +1,10 @@
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
{{- /* We can't use a materialized view here since they don't support 'create or refresh' yet.*/}}
USE CATALOG {{"{{"}}catalog{{"}}"}};
USE IDENTIFIER({{"{{"}}schema{{"}}"}});
CREATE OR REPLACE MATERIALIZED VIEW
CREATE OR REPLACE VIEW
orders_daily
AS SELECT
order_date, count(*) AS number_of_orders