mirror of https://github.com/databricks/cli.git

Compare commits: 14 commits, d9ed8ebeae ... 2f7fa97601
Author | SHA1
---|---
Shreyas Goenka | 2f7fa97601
Shreyas Goenka | 8d2c2ab8d3
Gleb Kanterov | ed448815b4
Andrew Nester | 582558cac2
shreyas-goenka | 5d9910c8e0
Gleb Kanterov | 70ce802518
Lennart Kats (databricks) | ed4a4585c0
Lennart Kats (databricks) | 2e000f1ebd
Pieter Noordhuis | 5fac7edcdf
Pieter Noordhuis | 0f4891f0fe
Andrew Nester | 43ace69bb9
Andrew Nester | 7dcc791b05
Lennart Kats (databricks) | 85459c1963
Andrew Nester | 70363836d5
CHANGELOG.md (+23 lines)

```diff
@@ -1,5 +1,28 @@
 # Version changelog
 
+## [Release] Release v0.227.1
+
+CLI:
+* Disable prompt for storage-credentials get command ([#1723](https://github.com/databricks/cli/pull/1723)).
+
+Bundles:
+* Do not treat empty path as a local path ([#1717](https://github.com/databricks/cli/pull/1717)).
+* Correctly mark PyPI package name specs with multiple specifiers as remote libraries ([#1725](https://github.com/databricks/cli/pull/1725)).
+* Improve error handling for /Volumes paths in mode: development ([#1716](https://github.com/databricks/cli/pull/1716)).
+
+Internal:
+* Ignore CLI version check on development builds of the CLI ([#1714](https://github.com/databricks/cli/pull/1714)).
+
+API Changes:
+* Added `databricks resource-quotas` command group.
+* Added `databricks policy-compliance-for-clusters` command group.
+* Added `databricks policy-compliance-for-jobs` command group.
+
+OpenAPI commit 3eae49b444cac5a0118a3503e5b7ecef7f96527a (2024-08-21)
+Dependency updates:
+* Bump github.com/databricks/databricks-sdk-go from 0.44.0 to 0.45.0 ([#1719](https://github.com/databricks/cli/pull/1719)).
+* Revert hc-install version to 0.7.0 ([#1711](https://github.com/databricks/cli/pull/1711)).
+
 ## [Release] Release v0.227.0
 
 CLI:
```
```diff
@@ -6,5 +6,5 @@ type Deployment struct {
 	FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`
 
 	// Lock configures locking behavior on deployment.
-	Lock Lock `json:"lock"`
+	Lock Lock `json:"lock,omitempty"`
 }
```
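
A note on what the `omitempty` change above does and does not do: encoding/json drops empty scalars (false, 0, "") from the output, but it never treats a non-pointer struct as empty, so `lock` is still emitted even when zero-valued. The tag change therefore presumably matters to tooling that inspects these tags (such as JSON-schema generation for bundle configuration) rather than to marshaling itself. A minimal sketch; the `Lock` field below is hypothetical, since the real type's fields are not shown in this hunk:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical stand-in: the real Lock fields are not part of this diff.
type Lock struct {
	Enabled bool `json:"enabled,omitempty"`
}

type Deployment struct {
	FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`
	Lock             Lock `json:"lock,omitempty"`
}

func main() {
	out, _ := json.Marshal(Deployment{})
	// The false bool is omitted, but encoding/json never considers a
	// non-pointer struct empty, so the zero-valued Lock still appears:
	// {"lock":{}}
	fmt.Println(string(out))
}
```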
```diff
@@ -64,6 +64,7 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) {
 }
 
 func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
+	var diags diag.Diagnostics
 	p := b.Config.Presets
 	u := b.Config.Workspace.CurrentUser
 
@@ -74,44 +75,56 @@ func validateDevelopmentMode(b *bundle.Bundle) diag.Diagnostics {
 	// status to UNPAUSED at the level of an individual object, which was
 	// historically allowed.)
 	if p.TriggerPauseStatus == config.Unpaused {
-		return diag.Diagnostics{{
+		diags = diags.Append(diag.Diagnostic{
 			Severity:  diag.Error,
 			Summary:   "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default",
 			Locations: []dyn.Location{b.Config.GetLocation("presets.trigger_pause_status")},
-		}}
+		})
 	}
 
 	// Make sure this development copy has unique names and paths to avoid conflicts
 	if path := findNonUserPath(b); path != "" {
-		return diag.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path)
+		if path == "artifact_path" && strings.HasPrefix(b.Config.Workspace.ArtifactPath, "/Volumes") {
+			// For Volumes paths we recommend including the current username as a substring
+			diags = diags.Extend(diag.Errorf("%s should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'", path))
+		} else {
+			// For non-Volumes paths recommend simply putting things in the home folder
+			diags = diags.Extend(diag.Errorf("%s must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'", path))
+		}
 	}
 	if p.NamePrefix != "" && !strings.Contains(p.NamePrefix, u.ShortName) && !strings.Contains(p.NamePrefix, u.UserName) {
 		// Resources such as pipelines require a unique name, e.g. '[dev steve] my_pipeline'.
 		// For this reason we require the name prefix to contain the current username;
 		// it's a pitfall for users if they don't include it and later find out that
 		// only a single user can do development deployments.
-		return diag.Diagnostics{{
+		diags = diags.Append(diag.Diagnostic{
 			Severity:  diag.Error,
 			Summary:   "prefix should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'",
 			Locations: []dyn.Location{b.Config.GetLocation("presets.name_prefix")},
-		}}
+		})
 	}
-	return nil
+	return diags
 }
 
 // findNonUserPath finds the first workspace path such as root_path that doesn't
 // contain the current username or current user's shortname.
 func findNonUserPath(b *bundle.Bundle) string {
-	username := b.Config.Workspace.CurrentUser.UserName
+	containsName := func(path string) bool {
+		username := b.Config.Workspace.CurrentUser.UserName
+		shortname := b.Config.Workspace.CurrentUser.ShortName
+		return strings.Contains(path, username) || strings.Contains(path, shortname)
+	}
 
-	if b.Config.Workspace.RootPath != "" && !strings.Contains(b.Config.Workspace.RootPath, username) {
+	if b.Config.Workspace.RootPath != "" && !containsName(b.Config.Workspace.RootPath) {
 		return "root_path"
 	}
-	if b.Config.Workspace.StatePath != "" && !strings.Contains(b.Config.Workspace.StatePath, username) {
+	if b.Config.Workspace.StatePath != "" && !containsName(b.Config.Workspace.StatePath) {
 		return "state_path"
 	}
-	if b.Config.Workspace.FilePath != "" && !strings.Contains(b.Config.Workspace.FilePath, username) {
+	if b.Config.Workspace.FilePath != "" && !containsName(b.Config.Workspace.FilePath) {
 		return "file_path"
 	}
-	if b.Config.Workspace.ArtifactPath != "" && !strings.Contains(b.Config.Workspace.ArtifactPath, username) {
+	if b.Config.Workspace.ArtifactPath != "" && !containsName(b.Config.Workspace.ArtifactPath) {
 		return "artifact_path"
 	}
 	return ""
```
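
The refactor above swaps early returns for an accumulated `diag.Diagnostics`, so one validation pass can surface every problem instead of stopping at the first. A self-contained sketch of the accumulation pattern; the types below are stand-ins that only mirror the `Append`/`Extend` method names from the diff, not the CLI's actual `libs/diag` package:

```go
package main

import "fmt"

// Stand-ins mirroring the shape of diag.Diagnostic / diag.Diagnostics.
type Diagnostic struct {
	Severity string
	Summary  string
}

type Diagnostics []Diagnostic

func (ds Diagnostics) Append(d Diagnostic) Diagnostics  { return append(ds, d) }
func (ds Diagnostics) Extend(o Diagnostics) Diagnostics { return append(ds, o...) }

// validate keeps checking after the first failure so the user sees every
// violation in a single run, like validateDevelopmentMode above.
func validate(namePrefix, rootPath string) Diagnostics {
	var diags Diagnostics
	if namePrefix == "" {
		diags = diags.Append(Diagnostic{"error", "name prefix must not be empty"})
	}
	if rootPath == "" {
		diags = diags.Append(Diagnostic{"error", "root path must not be empty"})
	}
	return diags
}

func main() {
	for _, d := range validate("", "") {
		fmt.Printf("%s: %s\n", d.Severity, d.Summary)
	}
}
```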
```diff
@@ -230,10 +230,20 @@ func TestValidateDevelopmentMode(t *testing.T) {
 	diags := validateDevelopmentMode(b)
 	require.NoError(t, diags.Error())
 
+	// Test with /Volumes path
+	b = mockBundle(config.Development)
+	b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/lennart/libs"
+	diags = validateDevelopmentMode(b)
+	require.NoError(t, diags.Error())
+	b.Config.Workspace.ArtifactPath = "/Volumes/catalog/schema/libs"
+	diags = validateDevelopmentMode(b)
+	require.ErrorContains(t, diags.Error(), "artifact_path should contain the current username or ${workspace.current_user.short_name} to ensure uniqueness when using 'mode: development'")
+
 	// Test with a bundle that has a non-user path
 	b = mockBundle(config.Development)
 	b.Config.Workspace.RootPath = "/Shared/.bundle/x/y/state"
 	diags = validateDevelopmentMode(b)
-	require.ErrorContains(t, diags.Error(), "root_path")
+	require.ErrorContains(t, diags.Error(), "root_path must start with '~/' or contain the current username to ensure uniqueness when using 'mode: development'")
 
 	// Test with a bundle that has an unpaused trigger pause status
 	b = mockBundle(config.Development)
```
```diff
@@ -1,15 +1,21 @@
 package python
 
 import (
+	"bytes"
 	"context"
 	"encoding/json"
 	"errors"
 	"fmt"
+	"io"
 	"os"
 	"path/filepath"
+
+	"github.com/databricks/cli/libs/python"
+	"github.com/databricks/databricks-sdk-go/logger"
+	"github.com/fatih/color"
+
 	"strings"
 
-	"github.com/databricks/cli/libs/python"
-
 	"github.com/databricks/cli/bundle/env"
@@ -169,7 +175,11 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
 		return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
 	}
 
-	stderrWriter := newLogWriter(ctx, "stderr: ")
+	stderrBuf := bytes.Buffer{}
+	stderrWriter := io.MultiWriter(
+		newLogWriter(ctx, "stderr: "),
+		&stderrBuf,
+	)
 	stdoutWriter := newLogWriter(ctx, "stdout: ")
 
 	_, processErr := process.Background(
@@ -197,7 +207,13 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
 	// process can fail without reporting errors in diagnostics file or creating it, for instance,
 	// venv doesn't have PyDABs library installed
 	if processErr != nil {
-		return dyn.InvalidValue, diag.Errorf("python mutator process failed: %sw, use --debug to enable logging", processErr)
+		diagnostic := diag.Diagnostic{
+			Severity: diag.Error,
+			Summary:  fmt.Sprintf("python mutator process failed: %q, use --debug to enable logging", processErr),
+			Detail:   explainProcessErr(stderrBuf.String()),
+		}
+
+		return dyn.InvalidValue, diag.Diagnostics{diagnostic}
 	}
 
 	// or we can fail to read diagnostics file, that should always be created
@@ -205,15 +221,40 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
 		return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
 	}
 
-	output, err := loadOutputFile(rootPath, outputPath)
-	if err != nil {
-		return dyn.InvalidValue, diag.Errorf("failed to load Python mutator output: %s", err)
-	}
+	output, outputDiags := loadOutputFile(rootPath, outputPath)
+	pythonDiagnostics = pythonDiagnostics.Extend(outputDiags)
 
 	// we pass through pythonDiagnostic because it contains warnings
 	return output, pythonDiagnostics
 }
 
+const installExplanation = `If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
+and that the wheel is installed in the Python environment:
+
+  $ .venv/bin/pip install -e .
+
+If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
+or activate the environment before running CLI commands:
+
+  experimental:
+    pydabs:
+      venv_path: .venv
+`
+
+// explainProcessErr provides additional explanation for common errors.
+// It's meant to be the best effort, and not all errors are covered.
+// Output should only be used for error reporting.
+func explainProcessErr(stderr string) string {
+	// implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
+	if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
+		summary := color.CyanString("Explanation: ") + "'databricks-pydabs' library is not installed in the Python environment.\n"
+
+		return stderr + "\n" + summary + "\n" + installExplanation
+	}
+
+	return stderr
+}
+
 func writeInputFile(inputPath string, input dyn.Value) error {
 	// we need to marshal dyn.Value instead of bundle.Config to JSON to support
 	// non-string fields assigned with bundle variables
@@ -225,10 +266,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
 	return os.WriteFile(inputPath, rootConfigJson, 0600)
 }
 
-func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
+func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
 	outputFile, err := os.Open(outputPath)
 	if err != nil {
-		return dyn.InvalidValue, fmt.Errorf("failed to open output file: %w", err)
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
 	}
 
 	defer outputFile.Close()
@@ -243,27 +284,34 @@ func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
 	// for that, we pass virtualPath instead of outputPath as file location
 	virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
 	if err != nil {
-		return dyn.InvalidValue, fmt.Errorf("failed to get absolute path: %w", err)
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
 	}
 
 	generated, err := yamlloader.LoadYAML(virtualPath, outputFile)
 	if err != nil {
-		return dyn.InvalidValue, fmt.Errorf("failed to parse output file: %w", err)
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse output file: %w", err))
 	}
 
-	normalized, diagnostic := convert.Normalize(config.Root{}, generated)
-	if diagnostic.Error() != nil {
-		return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %w", diagnostic.Error())
-	}
+	return strictNormalize(config.Root{}, generated)
+}
+
+func strictNormalize(dst any, generated dyn.Value) (dyn.Value, diag.Diagnostics) {
+	normalized, diags := convert.Normalize(dst, generated)
 
 	// warnings shouldn't happen because output should be already normalized
 	// when it happens, it's a bug in the mutator, and should be treated as an error
-	for _, d := range diagnostic.Filter(diag.Warning) {
-		return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %s", d.Summary)
+	strictDiags := diag.Diagnostics{}
+
+	for _, d := range diags {
+		if d.Severity == diag.Warning {
+			d.Severity = diag.Error
+		}
+
+		strictDiags = strictDiags.Append(d)
 	}
 
-	return normalized, nil
+	return normalized, strictDiags
 }
 
 // loadDiagnosticsFile loads diagnostics from a file.
```
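
The stderr change above tees the subprocess output: one copy streams to the debug log through `newLogWriter`, the other lands in a buffer so `explainProcessErr` can attach it to the error diagnostic. A minimal standard-library sketch of the same `io.MultiWriter` technique (the sample command assumes a POSIX shell):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
	"os/exec"
)

func main() {
	// Tee stderr: stream it live and keep a copy for later inspection.
	var stderrBuf bytes.Buffer
	cmd := exec.Command("sh", "-c", "echo boom >&2; exit 1")
	cmd.Stderr = io.MultiWriter(os.Stderr, &stderrBuf)

	if err := cmd.Run(); err != nil {
		// The buffered copy is available to enrich the error message.
		fmt.Printf("process failed: %v, stderr: %q\n", err, stderrBuf.String())
	}
}
```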
```diff
@@ -10,6 +10,8 @@ import (
 	"runtime"
 	"testing"
 
+	"github.com/databricks/cli/libs/dyn/convert"
+
 	"github.com/databricks/cli/libs/dyn/merge"
 
 	"github.com/databricks/cli/bundle/env"
@@ -255,7 +257,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
 	mutator := PythonMutator(PythonMutatorPhaseLoad)
 	diag := bundle.Apply(ctx, b, mutator)
 
-	assert.EqualError(t, diag.Error(), "failed to load Python mutator output: failed to normalize output: unknown field: unknown_property")
+	assert.EqualError(t, diag.Error(), "unknown field: unknown_property")
 }
 
 func TestPythonMutator_disabled(t *testing.T) {
@@ -546,6 +548,46 @@ func TestInterpreterPath(t *testing.T) {
 	}
 }
 
+func TestStrictNormalize(t *testing.T) {
+	// NB: there is no way to trigger diag.Error, so we don't test it
+
+	type TestStruct struct {
+		A int `json:"a"`
+	}
+
+	value := dyn.NewValue(map[string]dyn.Value{"A": dyn.NewValue("abc", nil)}, nil)
+
+	_, diags := convert.Normalize(TestStruct{}, value)
+	_, strictDiags := strictNormalize(TestStruct{}, value)
+
+	assert.False(t, diags.HasError())
+	assert.True(t, strictDiags.HasError())
+}
+
+func TestExplainProcessErr(t *testing.T) {
+	stderr := "/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')\n"
+	expected := `/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')
+
+Explanation: 'databricks-pydabs' library is not installed in the Python environment.
+
+If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
+and that the wheel is installed in the Python environment:
+
+  $ .venv/bin/pip install -e .
+
+If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
+or activate the environment before running CLI commands:
+
+  experimental:
+    pydabs:
+      venv_path: .venv
+`
+
+	out := explainProcessErr(stderr)
+
+	assert.Equal(t, expected, out)
+}
+
 func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
 	ctx := context.Background()
 	ctx, stub := process.WithStub(ctx)
```
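
`TestStrictNormalize` pins down the new behavior: plain `convert.Normalize` reports an incompatible value as a warning, while `strictNormalize` upgrades it to an error. A self-contained sketch of that promotion idiom, with stand-in types rather than the CLI's `diag` package:

```go
package main

import "fmt"

type Severity int

const (
	Warning Severity = iota
	Error
)

type Diagnostic struct {
	Severity Severity
	Summary  string
}

// strictify mirrors the loop in strictNormalize: warnings are upgraded to
// errors, everything else passes through unchanged.
func strictify(in []Diagnostic) []Diagnostic {
	out := make([]Diagnostic, 0, len(in))
	for _, d := range in {
		if d.Severity == Warning {
			d.Severity = Error
		}
		out = append(out, d)
	}
	return out
}

func main() {
	diags := []Diagnostic{{Warning, "unknown field: unknown_property"}}
	fmt.Println(strictify(diags)[0].Severity == Error) // true
}
```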
```diff
@@ -10,7 +10,6 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/dyn/convert"
 	"github.com/databricks/cli/libs/dyn/dynvar"
-	"github.com/databricks/cli/libs/log"
 )
 
 type resolveVariableReferences struct {
@@ -124,6 +123,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
 	// We rewrite it here to make the resolution logic simpler.
 	varPath := dyn.NewPath(dyn.Key("var"))
 
+	var diags diag.Diagnostics
 	err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
 		// Synthesize a copy of the root that has all fields that are present in the type
 		// but not set in the dynamic value set to their corresponding empty value.
@@ -180,14 +180,13 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
 
 		// Normalize the result because variable resolution may have been applied to non-string fields.
 		// For example, a variable reference may have been resolved to an integer.
-		root, diags := convert.Normalize(b.Config, root)
-		for _, diag := range diags {
-			// This occurs when a variable's resolved value is incompatible with the field's type.
-			// Log a warning until we have a better way to surface these diagnostics to the user.
-			log.Warnf(ctx, "normalization diagnostic: %s", diag.Summary)
-		}
+		root, normaliseDiags := convert.Normalize(b.Config, root)
+		diags = diags.Extend(normaliseDiags)
 		return root, nil
 	})
 
-	return diag.FromErr(err)
+	if err != nil {
+		diags = diags.Extend(diag.FromErr(err))
+	}
+	return diags
 }
```
```diff
@@ -111,6 +111,13 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error {
 		environ["PATH"] = path
 	}
 
+	// Include $AZURE_CONFIG_FILE in set of environment variables to pass along.
+	// This is set in Azure DevOps by the AzureCLI@2 task.
+	azureConfigFile, ok := env.Lookup(ctx, "AZURE_CONFIG_FILE")
+	if ok {
+		environ["AZURE_CONFIG_FILE"] = azureConfigFile
+	}
+
 	// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
 	// See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration
 	devConfigFile, ok := env.Lookup(ctx, "TF_CLI_CONFIG_FILE")
@@ -269,19 +269,20 @@ func TestSetUserAgentExtraEnvVar(t *testing.T) {
 }
 
 func TestInheritEnvVars(t *testing.T) {
-	env := map[string]string{}
-
 	t.Setenv("HOME", "/home/testuser")
 	t.Setenv("PATH", "/foo:/bar")
 	t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
+	t.Setenv("AZURE_CONFIG_FILE", "/tmp/foo/bar")
 
-	err := inheritEnvVars(context.Background(), env)
-
-	require.NoError(t, err)
-
-	require.Equal(t, env["HOME"], "/home/testuser")
-	require.Equal(t, env["PATH"], "/foo:/bar")
-	require.Equal(t, env["TF_CLI_CONFIG_FILE"], "/tmp/config.tfrc")
+	ctx := context.Background()
+	env := map[string]string{}
+	err := inheritEnvVars(ctx, env)
+	if assert.NoError(t, err) {
+		assert.Equal(t, "/home/testuser", env["HOME"])
+		assert.Equal(t, "/foo:/bar", env["PATH"])
+		assert.Equal(t, "/tmp/config.tfrc", env["TF_CLI_CONFIG_FILE"])
+		assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_FILE"])
+	}
 }
 
 func TestSetUserProfileFromInheritEnvVars(t *testing.T) {
```
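
The `AZURE_CONFIG_FILE` addition follows the allow-list pattern the function already uses for `TF_CLI_CONFIG_FILE`: only explicitly named host variables are forwarded to the child process. A minimal sketch of the pattern, using `os.LookupEnv` in place of the CLI's context-aware `env.Lookup` helper:

```go
package main

import (
	"fmt"
	"os"
)

// copySelected forwards only the named variables from the host environment,
// leaving everything else out of the child process env.
func copySelected(environ map[string]string, names ...string) {
	for _, name := range names {
		if value, ok := os.LookupEnv(name); ok {
			environ[name] = value
		}
	}
}

func main() {
	environ := map[string]string{}
	copySelected(environ, "HOME", "PATH", "TF_CLI_CONFIG_FILE", "AZURE_CONFIG_FILE")
	fmt.Println(environ)
}
```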
```diff
@@ -72,9 +72,11 @@ func IsLibraryLocal(dep string) bool {
 
 // ^[a-zA-Z0-9\-_]+: Matches the package name, allowing alphanumeric characters, dashes (-), and underscores (_).
 // \[.*\])?: Optionally matches any extras specified in square brackets, e.g., [security].
-// ((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?)?: Optionally matches version specifiers, supporting various operators (==, !=, etc.) followed by a version number (e.g., 2.25.1).
+// ((==|!=|<=|>=|~=|>|<)\d+(\.\d+){0,2}(\.\*)?): Optionally matches version specifiers, supporting various operators (==, !=, etc.) followed by a version number (e.g., 2.25.1).
+// ,?: Optionally matches a comma (,) at the end of the specifier which is used to separate multiple specifiers.
+// There can be multiple version specifiers separated by commas or no specifiers.
 // Spec for package name and version specifier: https://pip.pypa.io/en/stable/reference/requirement-specifiers/
-var packageRegex = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?)?$`)
+var packageRegex = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?,?)*$`)
 
 func isPackage(name string) bool {
 	if packageRegex.MatchString(name) {
@@ -62,6 +62,8 @@ func TestIsLibraryLocal(t *testing.T) {
 		{path: "beautifulsoup4 ~= 4.12.3", expected: false},
 		{path: "beautifulsoup4[security, tests]", expected: false},
 		{path: "beautifulsoup4[security, tests] ~= 4.12.3", expected: false},
+		{path: "beautifulsoup4>=1.0.0,<2.0.0", expected: false},
+		{path: "beautifulsoup4>=1.0.0,~=1.2.0,<2.0.0", expected: false},
 		{path: "https://github.com/pypa/pip/archive/22.0.2.zip", expected: false},
 		{path: "pip @ https://github.com/pypa/pip/archive/22.0.2.zip", expected: false},
 		{path: "requests [security] @ https://github.com/psf/requests/archive/refs/heads/main.zip", expected: false},
```
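
A quick sanity check of the widened `packageRegex` (copied verbatim from the diff) against comma-separated specifiers. Anything that matches is classified as a PyPI package spec, i.e. a remote library, so `IsLibraryLocal` returns false for it:

```go
package main

import (
	"fmt"
	"regexp"
)

// The updated expression: the specifier group now repeats (`*`) and may end
// with a comma, so "pkg>=1.0.0,<2.0.0" is recognized as a package spec.
var packageRegex = regexp.MustCompile(`^[a-zA-Z0-9\-_]+\s?(\[.*\])?\s?((==|!=|<=|>=|~=|==|>|<)\s?\d+(\.\d+){0,2}(\.\*)?,?)*$`)

func main() {
	for _, spec := range []string{
		"beautifulsoup4",                       // true
		"beautifulsoup4==4.12.3",               // true
		"beautifulsoup4>=1.0.0,<2.0.0",         // true
		"beautifulsoup4>=1.0.0,~=1.2.0,<2.0.0", // true
		"./my-local-package",                   // false: a path, not a spec
	} {
		fmt.Printf("%-40s %v\n", spec, packageRegex.MatchString(spec))
	}
}
```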
```diff
@@ -2,6 +2,7 @@ package python
 
 import (
 	"context"
+	"strconv"
 	"strings"
 
 	"github.com/databricks/cli/bundle"
@@ -38,7 +39,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
 	tasks := libraries.FindTasksWithLocalLibraries(b)
 	for _, task := range tasks {
 		if task.NewCluster != nil {
-			if lowerThanExpectedVersion(ctx, task.NewCluster.SparkVersion) {
+			if lowerThanExpectedVersion(task.NewCluster.SparkVersion) {
 				return true
 			}
 		}
@@ -47,7 +48,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
 			for _, job := range b.Config.Resources.Jobs {
 				for _, cluster := range job.JobClusters {
 					if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" {
-						if lowerThanExpectedVersion(ctx, cluster.NewCluster.SparkVersion) {
+						if lowerThanExpectedVersion(cluster.NewCluster.SparkVersion) {
 							return true
 						}
 					}
@@ -64,7 +65,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
 				return false
 			}
 
-			if lowerThanExpectedVersion(ctx, version) {
+			if lowerThanExpectedVersion(version) {
 				return true
 			}
 		}
@@ -73,7 +74,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
 	return false
 }
 
-func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool {
+func lowerThanExpectedVersion(sparkVersion string) bool {
 	parts := strings.Split(sparkVersion, ".")
 	if len(parts) < 2 {
 		return false
@@ -82,6 +83,17 @@ func lowerThanExpectedVersion(sparkVersion string) bool {
 	if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99)
 		parts[1] = "99"
 	}
 
+	// if any of the version parts are not numbers, we can't compare
+	// so consider it as compatible version
+	if _, err := strconv.Atoi(parts[0]); err != nil {
+		return false
+	}
+
+	if _, err := strconv.Atoi(parts[1]); err != nil {
+		return false
+	}
+
 	v := "v" + parts[0] + "." + parts[1]
 	return semver.Compare(v, "v13.1") < 0
 }
@@ -344,6 +344,8 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
 		"14.1.x-scala2.12":                false,
+		"13.x-snapshot-scala-2.12":        false,
+		"13.x-rc-scala-2.12":              false,
 		"client.1.10-scala2.12":           false,
 		"latest-stable-gpu-scala2.11":     false,
 		"10.4.x-aarch64-photon-scala2.12": true,
 		"10.4.x-scala2.12":                true,
 		"13.0.x-scala2.12":                true,
@@ -351,7 +353,7 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
 	}
 
 	for k, v := range testCases {
-		result := lowerThanExpectedVersion(context.Background(), k)
+		result := lowerThanExpectedVersion(k)
 		require.Equal(t, v, result, k)
 	}
 }
```
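
The `strconv.Atoi` guard added above makes the comparison total: `golang.org/x/mod/semver` expects `v`-prefixed numeric versions, so a non-numeric major or minor now short-circuits to "compatible" instead of feeding garbage into the comparison. A standalone sketch mirroring the shape of `lowerThanExpectedVersion`:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"

	"golang.org/x/mod/semver"
)

// lower parses "13.3.x-scala2.12"-style strings, treats "x" minors as the
// highest minor (.99), and bails out as compatible on non-numeric parts.
func lower(sparkVersion string) bool {
	parts := strings.Split(sparkVersion, ".")
	if len(parts) < 2 {
		return false
	}
	if parts[1][0] == 'x' {
		parts[1] = "99"
	}
	if _, err := strconv.Atoi(parts[0]); err != nil {
		return false
	}
	if _, err := strconv.Atoi(parts[1]); err != nil {
		return false
	}
	return semver.Compare("v"+parts[0]+"."+parts[1], "v13.1") < 0
}

func main() {
	fmt.Println(lower("10.4.x-scala2.12"))         // true: v10.4 < v13.1
	fmt.Println(lower("13.x-snapshot-scala-2.12")) // false: treated as v13.99
	fmt.Println(lower("client.1.10-scala2.12"))    // false: "client" is not numeric
}
```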
```diff
@@ -127,6 +127,55 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
 	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
 }
 
+func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
+	ctx, wt := acc.WorkspaceTest(t)
+	w := wt.W
+
+	nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
+	uniqueId := uuid.New().String()
+	bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
+		"unique_id":     uniqueId,
+		"node_type_id":  nodeTypeId,
+		"spark_version": defaultSparkVersion,
+	})
+	require.NoError(t, err)
+
+	// deploy pipeline
+	err = deployBundle(t, ctx, bundleRoot)
+	require.NoError(t, err)
+
+	// assert pipeline is created
+	pipelineName := "test-bundle-pipeline-" + uniqueId
+	pipeline, err := w.Pipelines.GetByName(ctx, pipelineName)
+	require.NoError(t, err)
+	assert.Equal(t, pipeline.Name, pipelineName)
+
+	// assert job is created
+	jobName := "test-bundle-job-" + uniqueId
+	job, err := w.Jobs.GetBySettingsName(ctx, jobName)
+	require.NoError(t, err)
+	assert.Equal(t, job.Settings.Name, jobName)
+
+	// delete resources.yml
+	err = os.Remove(filepath.Join(bundleRoot, "resources.yml"))
+	require.NoError(t, err)
+
+	// Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve.
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	t.Setenv("TERM", "dumb")
+	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
+	stdout, stderr, err := c.Run()
+
+	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
+	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
+Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
+restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
+properties such as the 'catalog' or 'storage' are changed:
+  delete pipeline bar`)
+	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
+
+}
+
 func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
 	ctx, wt := acc.UcWorkspaceTest(t)
 	w := wt.W
@@ -157,9 +206,6 @@ func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
 	stdout, stderr, err := c.Run()
 
 	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
-
-	fmt.Println("expected: ", "This action will result in the deletion or recreation of the following DLT Pipelines along with the Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline properties such as the `catalog` or `storage` are changed::\n recreate pipeline foo")
-	fmt.Println("stderr: ", stderr.String())
 	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
 Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
 restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
```
```diff
@@ -267,6 +267,8 @@ func (n normalizeOptions) normalizeString(typ reflect.Type, src dyn.Value, path
 		out = strconv.FormatInt(src.MustInt(), 10)
 	case dyn.KindFloat:
 		out = strconv.FormatFloat(src.MustFloat(), 'f', -1, 64)
+	case dyn.KindTime:
+		out = src.MustTime().String()
 	case dyn.KindNil:
 		// Return a warning if the field is present but has a null value.
 		return dyn.InvalidValue, diags.Append(nullWarning(dyn.KindString, src, path))
@@ -569,6 +569,14 @@ func TestNormalizeStringFromFloat(t *testing.T) {
 	assert.Equal(t, dyn.NewValue("1.2", vin.Locations()), vout)
 }
 
+func TestNormalizeStringFromTime(t *testing.T) {
+	var typ string
+	vin := dyn.NewValue(dyn.MustTime("2024-08-29"), []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, dyn.NewValue("2024-08-29", vin.Locations()), vout)
+}
+
 func TestNormalizeStringError(t *testing.T) {
 	var typ string
 	vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")})
```
```diff
@@ -2,7 +2,6 @@ package dyn
 
 import (
 	"fmt"
-	"time"
 )
 
 type Kind int
@@ -34,7 +33,7 @@ func kindOf(v any) Kind {
 		return KindInt
 	case float32, float64:
 		return KindFloat
-	case time.Time:
+	case Time:
 		return KindTime
 	case nil:
 		return KindNil
```
```diff
@@ -83,16 +83,16 @@ func TestOverride_Primitive(t *testing.T) {
 		{
 			name:  "time (updated)",
 			state: visitorState{updated: []string{"root"}},
-			left:     dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
-			right:    dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
-			expected: dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
+			left:     dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
+			right:    dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
+			expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
 		},
 		{
 			name:  "time (not updated)",
 			state: visitorState{},
-			left:     dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
-			right:    dyn.NewValue(time.UnixMilli(10000), []dyn.Location{rightLocation}),
-			expected: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
+			left:     dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
+			right:    dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{rightLocation}),
+			expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
 		},
 		{
 			name: "different types (updated)",
```
```diff
@@ -0,0 +1,62 @@
+package dyn
+
+import (
+	"fmt"
+	"time"
+)
+
+// Time represents a time-like primitive value.
+//
+// It represents a timestamp and includes the original string value
+// that was parsed to create the timestamp. This makes it possible
+// to coalesce a value that YAML interprets as a timestamp back into
+// a string without losing information.
+type Time struct {
+	t time.Time
+	s string
+}
+
+// NewTime creates a new Time from the given string.
+func NewTime(str string) (Time, error) {
+	// Try a couple of layouts
+	for _, layout := range []string{
+		"2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields.
+		"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
+		"2006-1-2 15:4:5.999999999",       // space separated with no time zone
+		"2006-1-2",                        // date only
+	} {
+		t, terr := time.Parse(layout, str)
+		if terr == nil {
+			return Time{t: t, s: str}, nil
+		}
+	}
+
+	return Time{}, fmt.Errorf("invalid time value: %q", str)
+}
+
+// MustTime creates a new Time from the given string.
+// It panics if the string cannot be parsed.
+func MustTime(str string) Time {
+	t, err := NewTime(str)
+	if err != nil {
+		panic(err)
+	}
+	return t
+}
+
+// FromTime creates a new Time from the given time.Time.
+// It uses the RFC3339Nano format for its string representation.
+// This guarantees that it can roundtrip into a string without losing information.
+func FromTime(t time.Time) Time {
+	return Time{t: t, s: t.Format(time.RFC3339Nano)}
+}
+
+// Time returns the time.Time value.
+func (t Time) Time() time.Time {
+	return t.t
+}
+
+// String returns the original string value that was parsed to create the timestamp.
+func (t Time) String() string {
+	return t.s
+}
```
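
Why `dyn.Time` carries the source string: once parsed, a `time.Time` no longer remembers which accepted spelling produced it, so re-formatting cannot reproduce the original YAML scalar byte-for-byte. A standard-library sketch of the information loss:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// The short-field layout "2006-1-2" accepts both "2024-08-29" and
	// "2024-8-29", but the parsed time.Time keeps no record of which.
	in := "2024-8-29"
	t, err := time.Parse("2006-1-2", in)
	if err != nil {
		panic(err)
	}

	fmt.Println(t.Format(time.RFC3339Nano)) // 2024-08-29T00:00:00Z
	fmt.Println(t.Format("2006-01-02"))     // 2024-08-29: the "2024-8-29" spelling is gone
	// Hence dyn.Time stores the source string alongside the parsed value.
}
```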
```diff
@@ -0,0 +1,41 @@
+package dyn_test
+
+import (
+	"testing"
+	"time"
+
+	"github.com/databricks/cli/libs/dyn"
+	assert "github.com/databricks/cli/libs/dyn/dynassert"
+)
+
+func TestTimeValid(t *testing.T) {
+	for _, tc := range []string{
+		"2024-08-29",
+		"2024-01-15T12:34:56.789012345Z",
+	} {
+		tm, err := dyn.NewTime(tc)
+		if assert.NoError(t, err) {
+			assert.NotEqual(t, time.Time{}, tm.Time())
+			assert.Equal(t, tc, tm.String())
+		}
+	}
+}
+
+func TestTimeInvalid(t *testing.T) {
+	tm, err := dyn.NewTime("invalid")
+	assert.Error(t, err)
+	assert.Equal(t, dyn.Time{}, tm)
+}
+
+func TestTimeFromTime(t *testing.T) {
+	tref := time.Now()
+	t1 := dyn.FromTime(tref)
+
+	// Verify that the underlying value is the same.
+	assert.Equal(t, tref, t1.Time())
+
+	// Verify that the string representation can be used to construct the same.
+	t2, err := dyn.NewTime(t1.String())
+	assert.NoError(t, err)
+	assert.True(t, t1.Time().Equal(t2.Time()))
+}
```
```diff
@@ -127,7 +127,8 @@ func (v Value) AsAny() any {
 	case KindFloat:
 		return v.v
 	case KindTime:
-		return v.v
+		t := v.v.(Time)
+		return t.Time()
 	default:
 		// Panic because we only want to deal with known types.
 		panic(fmt.Sprintf("invalid kind: %d", v.k))
```
```diff
@@ -2,7 +2,6 @@ package dyn
 
 import (
 	"fmt"
-	"time"
 )
 
 // AsMap returns the underlying mapping if this value is a map,
@@ -123,14 +122,14 @@ func (v Value) MustFloat() float64 {
 
 // AsTime returns the underlying time if this value is a time,
 // the zero value and false otherwise.
-func (v Value) AsTime() (time.Time, bool) {
-	vv, ok := v.v.(time.Time)
+func (v Value) AsTime() (Time, bool) {
+	vv, ok := v.v.(Time)
 	return vv, ok
 }
 
 // MustTime returns the underlying time if this value is a time,
 // panics otherwise.
-func (v Value) MustTime() time.Time {
+func (v Value) MustTime() Time {
 	vv, ok := v.AsTime()
 	if !ok || v.k != KindTime {
 		panic(fmt.Sprintf("expected kind %s, got %s", KindTime, v.k))
@@ -143,7 +143,7 @@ func TestValueUnderlyingFloat(t *testing.T) {
 }
 
 func TestValueUnderlyingTime(t *testing.T) {
-	v := dyn.V(time.Now())
+	v := dyn.V(dyn.FromTime(time.Now()))
 
 	vv1, ok := v.AsTime()
 	assert.True(t, ok)
```
```diff
@@ -5,7 +5,6 @@ import (
 	"math"
 	"strconv"
 	"strings"
-	"time"
 
 	"github.com/databricks/cli/libs/dyn"
 	"gopkg.in/yaml.v3"
@@ -207,17 +206,9 @@ func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error
 	case "!!null":
 		return dyn.NewValue(nil, []dyn.Location{loc}), nil
 	case "!!timestamp":
-		// Try a couple of layouts
-		for _, layout := range []string{
-			"2006-1-2T15:4:5.999999999Z07:00", // RFC3339Nano with short date fields.
-			"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
-			"2006-1-2 15:4:5.999999999",       // space separated with no time zone
-			"2006-1-2",                        // date only
-		} {
-			t, terr := time.Parse(layout, node.Value)
-			if terr == nil {
-				return dyn.NewValue(t, []dyn.Location{loc}), nil
-			}
+		t, err := dyn.NewTime(node.Value)
+		if err == nil {
+			return dyn.NewValue(t, []dyn.Location{loc}), nil
 		}
 		return dyn.InvalidValue, errorf(loc, "invalid timestamp value: %v", node.Value)
 	default:
```
```diff
@@ -129,7 +129,7 @@ func (s *saver) toYamlNodeWithStyle(v dyn.Value, style yaml.Style) (*yaml.Node,
 	case dyn.KindFloat:
 		return &yaml.Node{Kind: yaml.ScalarNode, Value: fmt.Sprint(v.MustFloat()), Style: style}, nil
 	case dyn.KindTime:
-		return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().UTC().String(), Style: style}, nil
+		return &yaml.Node{Kind: yaml.ScalarNode, Value: v.MustTime().String(), Style: style}, nil
 	default:
 		// Panic because we only want to deal with known types.
 		panic(fmt.Sprintf("invalid kind: %d", v.Kind()))
```
```diff
@@ -2,10 +2,10 @@ package yamlsaver
 
 import (
 	"testing"
-	"time"
 
 	"github.com/databricks/cli/libs/dyn"
 	assert "github.com/databricks/cli/libs/dyn/dynassert"
+	"github.com/stretchr/testify/require"
 	"gopkg.in/yaml.v3"
 )
 
@@ -45,11 +45,14 @@ func TestMarshalBoolValue(t *testing.T) {
 }
 
 func TestMarshalTimeValue(t *testing.T) {
+	tm, err := dyn.NewTime("1970-01-01")
+	require.NoError(t, err)
+
 	s := NewSaver()
-	var timeValue = dyn.V(time.Unix(0, 0))
+	var timeValue = dyn.V(tm)
 	v, err := s.toYamlNode(timeValue)
 	assert.NoError(t, err)
-	assert.Equal(t, "1970-01-01 00:00:00 +0000 UTC", v.Value)
+	assert.Equal(t, "1970-01-01", v.Value)
 	assert.Equal(t, yaml.ScalarNode, v.Kind)
 }
 
```
The following change is applied identically in three bundle templates:

```diff
@@ -1,7 +1,7 @@
 {{define "latest_lts_dbr_version" -}}
-    13.3.x-scala2.12
+    15.4.x-scala2.12
 {{- end}}
 
 {{define "latest_lts_db_connect_version_spec" -}}
-    >=13.3,<13.4
+    >=15.4,<15.5
 {{- end}}
```
```diff
@@ -1,10 +1,9 @@
 -- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
-{{- /* We can't use a materialized view here since they don't support 'create or refresh' yet.*/}}
 
 USE CATALOG {{"{{"}}catalog{{"}}"}};
 USE IDENTIFIER({{"{{"}}schema{{"}}"}});
 
-CREATE OR REPLACE VIEW
+CREATE OR REPLACE MATERIALIZED VIEW
   orders_daily
 AS SELECT
   order_date, count(*) AS number_of_orders
```
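
An aside on the `{{"{{"}}catalog{{"}}"}}` idiom in the template above: Go's text/template engine emits the quoted inner strings verbatim, leaving a literal `{{catalog}}` placeholder in the rendered SQL for the downstream parameter substitution. A minimal sketch:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// {{"{{"}} and {{"}}"}} are actions that print the literal braces, so
	// the rendered output keeps the {{catalog}} placeholder intact.
	tmpl := template.Must(template.New("sql").Parse(`USE CATALOG {{"{{"}}catalog{{"}}"}};` + "\n"))
	if err := tmpl.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
	// Output: USE CATALOG {{catalog}};
}
```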