Merge remote-tracking branch 'origin' into auth-validate

Commit 440aafc0e5 by Shreyas Goenka, 2025-02-10 13:28:55 +01:00
GPG Key ID: 92A07DF49CCB0622 (no known key found for this signature in database)
123 changed files with 1544 additions and 300 deletions

.gitignore (vendored), 4 changed lines
View File

@@ -25,11 +25,7 @@ coverage-acceptance.txt
 __pycache__
 *.pyc
-.terraform
-.terraform.lock.hcl
 .vscode/launch.json
 .vscode/tasks.json
-.databricks
 .ruff_cache

View File

@@ -1,4 +1,4 @@
-default: vendor fmt lint
+default: vendor fmt lint tidy
 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .
@@ -9,6 +9,10 @@ GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped
 lint:
 golangci-lint run --fix
+tidy:
+@# not part of golangci-lint, apparently
+go mod tidy
 lintcheck:
 golangci-lint run ./...
@@ -59,4 +63,4 @@ integration: vendor
 integration-short: vendor
 $(INTEGRATION) -short
-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs
+.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

NOTICE, 5 changed lines
View File

@@ -109,3 +109,8 @@ License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
 https://github.com/BurntSushi/toml
 Copyright (c) 2013 TOML authors
 https://github.com/BurntSushi/toml/blob/master/COPYING
+
+dario.cat/mergo
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+https://github.com/darccio/mergo/blob/master/LICENSE

View File

@@ -19,6 +19,8 @@ import (
 "time"
 "unicode/utf8"
+"github.com/google/uuid"
 "github.com/databricks/cli/internal/testutil"
 "github.com/databricks/cli/libs/env"
 "github.com/databricks/cli/libs/testdiff"
@@ -123,7 +125,6 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 AddHandlers(defaultServer)
 // Redirect API access to local server:
 t.Setenv("DATABRICKS_HOST", defaultServer.URL)
-t.Setenv("DATABRICKS_TOKEN", "dapi1234")
 homeDir := t.TempDir()
 // Do not read user's ~/.databrickscfg
@@ -146,7 +147,15 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 // do it last so that full paths match first:
 repls.SetPath(buildDir, "[BUILD_DIR]")
-workspaceClient, err := databricks.NewWorkspaceClient()
+var config databricks.Config
+if cloudEnv == "" {
+// use fake token for local tests
+config = databricks.Config{Token: "dbapi1234"}
+} else {
+// non-local tests rely on environment variables
+config = databricks.Config{}
+}
+workspaceClient, err := databricks.NewWorkspaceClient(&config)
 require.NoError(t, err)
 user, err := workspaceClient.CurrentUser.Me(ctx)
@@ -264,7 +273,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 for _, stub := range config.Server {
 require.NotEmpty(t, stub.Pattern)
-server.Handle(stub.Pattern, func(req *http.Request) (any, int) {
+server.Handle(stub.Pattern, func(fakeWorkspace *testserver.FakeWorkspace, req *http.Request) (any, int) {
 statusCode := http.StatusOK
 if stub.Response.StatusCode != 0 {
 statusCode = stub.Response.StatusCode
@@ -285,6 +294,15 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
 }
+// Each local test should use a new token that will result into a new fake workspace,
+// so that test don't interfere with each other.
+if cloudEnv == "" {
+tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
+token := "dbapi" + tokenSuffix
+cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+token)
+repls.Set(token, "[DATABRICKS_TOKEN]")
+}
 // Write combined output to a file
 out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
 require.NoError(t, err)
@@ -303,8 +321,8 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 reqJson, err := json.Marshal(req)
 require.NoError(t, err)
-line := fmt.Sprintf("%s\n", reqJson)
-_, err = f.WriteString(line)
+reqJsonWithRepls := repls.Replace(string(reqJson))
+_, err = f.WriteString(reqJsonWithRepls + "\n")
 require.NoError(t, err)
 }
@@ -325,6 +343,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 // Make sure there are not unaccounted for new files
 files := ListDir(t, tmpDir)
+unexpected := []string{}
 for _, relPath := range files {
 if _, ok := inputs[relPath]; ok {
 continue
@@ -332,13 +351,17 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 if _, ok := outputs[relPath]; ok {
 continue
 }
-t.Errorf("Unexpected output: %s", relPath)
+unexpected = append(unexpected, relPath)
 if strings.HasPrefix(relPath, "out") {
 // We have a new file starting with "out"
 // Show the contents & support overwrite mode for it:
 doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 }
 }
+if len(unexpected) > 0 {
+t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
+}
 }
 func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {

acceptance/bin/sort_lines.py (new executable file), 10 lines
View File

@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in text file. Similar to 'sort' but no dependence on locale or presence of 'sort' in PATH.
"""
import sys
lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))

View File

@ -0,0 +1,50 @@
bundle:
name: same_name_libraries
variables:
cluster:
default:
spark_version: 15.4.x-scala2.12
node_type_id: i3.xlarge
data_security_mode: SINGLE_USER
num_workers: 0
spark_conf:
spark.master: "local[*, 4]"
spark.databricks.cluster.profile: singleNode
custom_tags:
ResourceClass: SingleNode
artifacts:
whl1:
type: whl
path: ./whl1
whl2:
type: whl
path: ./whl2
resources:
jobs:
test:
name: "test"
tasks:
- task_key: task1
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl1/dist/*.whl
- task_key: task2
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl2/dist/*.whl
- task_key: task3
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl1/dist/*.whl

View File

@ -0,0 +1,14 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Error: Duplicate local library name my_default_python-0.0.1-py3-none-any.whl
at resources.jobs.test.tasks[0].libraries[0].whl
resources.jobs.test.tasks[1].libraries[0].whl
in databricks.yml:36:15
databricks.yml:43:15
Local library names must be unique
Exit code: 1

View File

@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2

View File

@ -0,0 +1,36 @@
"""
setup.py configuration script describing how to build and package this project.
This file is primarily used by the setuptools library and typically should not
be executed directly. See README.md for how to deploy, test, and run
the my_default_python project.
"""
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_default_python
setup(
name="my_default_python",
version=my_default_python.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_default_python.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)

View File

@ -0,0 +1 @@
__version__ = "0.0.1"

View File

@ -0,0 +1 @@
print("hello")

View File

@ -0,0 +1,36 @@
"""
setup.py configuration script describing how to build and package this project.
This file is primarily used by the setuptools library and typically should not
be executed directly. See README.md for how to deploy, test, and run
the my_default_python project.
"""
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_default_python
setup(
name="my_default_python",
version=my_default_python.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_default_python.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)

View File

@ -0,0 +1 @@
__version__ = "0.0.1"

View File

@ -0,0 +1 @@
print("hello")

View File

@ -0,0 +1,2 @@
bundle:
name: debug

View File

@ -0,0 +1,15 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< {} pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< {} pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true

View File

@ -0,0 +1,92 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Debug: Apply pid=12345 mutator=load
10:07:59 Info: Phase: load pid=12345 mutator=load
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Debug: Apply pid=12345 mutator=initialize
10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
< "id": "[USERID]",
< "userName": "[USERNAME]"
< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
< HTTP/1.1 200 OK
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
< "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
10:07:59 Info: completed execution pid=12345 exit_code=0

View File

@ -0,0 +1,7 @@
Name: debug
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/debug/default
Validation OK!

View File

@ -0,0 +1,4 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt

View File

@ -0,0 +1,18 @@
LocalOnly = true
[[Repls]]
# The keys are unsorted and also vary per OS
Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '
New = 'Environment variables for Terraform: ...redacted... '
[[Repls]]
Old = 'pid=[0-9]+'
New = 'pid=12345'
[[Repls]]
Old = '\d\d:\d\d:\d\d'
New = '10:07:59'
[[Repls]]
Old = '\\'
New = '/'
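Aside (not part of the commit): the Old/New pairs above behave like ordinary Go regexp substitutions applied to the captured output before it is compared against the expected files. A small self-contained sketch with illustrative values only:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	line := `10:23:45 Debug: Apply pid=98765 mutator=load`

	// Rough equivalent of the Repls entries above: normalize pids and timestamps.
	line = regexp.MustCompile(`pid=[0-9]+`).ReplaceAllString(line, "pid=12345")
	line = regexp.MustCompile(`\d\d:\d\d:\d\d`).ReplaceAllString(line, "10:07:59")

	fmt.Println(line) // 10:07:59 Debug: Apply pid=12345 mutator=load
}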

View File

@ -0,0 +1,2 @@
bundle:
name: git_job

View File

@ -0,0 +1,17 @@
resources:
jobs:
out:
name: gitjob
tasks:
- task_key: test_task
notebook_task:
notebook_path: some/test/notebook.py
- task_key: test_task_2
notebook_task:
notebook_path: /Workspace/Users/foo@bar.com/some/test/notebook.py
source: WORKSPACE
git_source:
git_branch: main
git_commit: abcdef
git_provider: github
git_url: https://git.databricks.com

View File

@ -0,0 +1,2 @@
Job is using Git source, skipping downloading files
Job configuration successfully saved to out.job.yml

View File

@ -0,0 +1 @@
$CLI bundle generate job --existing-job-id 1234 --config-dir . --key out

View File

@ -0,0 +1,33 @@
LocalOnly = true # This test needs to run against stubbed Databricks API
[[Server]]
Pattern = "GET /api/2.1/jobs/get"
Response.Body = '''
{
"job_id": 11223344,
"settings": {
"name": "gitjob",
"git_source": {
"git_url": "https://git.databricks.com",
"git_provider": "github",
"git_branch": "main",
"git_commit": "abcdef"
},
"tasks": [
{
"task_key": "test_task",
"notebook_task": {
"notebook_path": "some/test/notebook.py"
}
},
{
"task_key": "test_task_2",
"notebook_task": {
"source": "WORKSPACE",
"notebook_path": "/Workspace/Users/foo@bar.com/some/test/notebook.py"
}
}
]
}
}
'''

View File

@@ -3,6 +3,7 @@
 >>> chmod 000 .git
 >>> [CLI] bundle validate
+Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
 Error: unable to load repository specific gitconfig: open config: permission denied
 Name: git-permerror
@@ -16,6 +17,7 @@ Found 1 error
 Exit code: 1
 >>> [CLI] bundle validate -o json
+Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
 Error: unable to load repository specific gitconfig: open config: permission denied
@@ -25,6 +27,7 @@ Exit code: 1
 }
 >>> withdir subdir/a/b [CLI] bundle validate -o json
+Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
 Error: unable to load repository specific gitconfig: open config: permission denied
@@ -39,11 +42,15 @@ Exit code: 1
 >>> chmod 000 .git/HEAD
 >>> [CLI] bundle validate -o json
+Warn: failed to load current branch: open HEAD: permission denied
+Warn: failed to load latest commit: open HEAD: permission denied
 {
 "bundle_root_path": "."
 }
 >>> withdir subdir/a/b [CLI] bundle validate -o json
+Warn: failed to load current branch: open HEAD: permission denied
+Warn: failed to load latest commit: open HEAD: permission denied
 {
 "bundle_root_path": "."
 }
@@ -54,6 +61,7 @@ Exit code: 1
 >>> chmod 000 .git/config
 >>> [CLI] bundle validate -o json
+Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
 Error: unable to load repository specific gitconfig: open config: permission denied
@@ -63,6 +71,7 @@ Exit code: 1
 }
 >>> withdir subdir/a/b [CLI] bundle validate -o json
+Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
 Error: unable to load repository specific gitconfig: open config: permission denied

View File

@@ -1,4 +1,4 @@
-Badness = "Warning logs not shown; inferred flag is set to true incorrect; bundle_root_path is not correct"
+Badness = "inferred flag is set to true incorrect; bundle_root_path is not correct; Warn and Error talk about the same; Warn goes to stderr, Error goes to stdout (for backward compat); Warning about permissions repeated twice"
 [GOOS]
 # This test relies on chmod which does not work on Windows

View File

@@ -42,11 +42,9 @@ from myscript.py 0 postbuild: hello stderr!
 Executing 'predeploy' script
 from myscript.py 0 predeploy: hello stdout!
 from myscript.py 0 predeploy: hello stderr!
-Error: unable to deploy to /Workspace/Users/[USERNAME]/.bundle/scripts/default/state as [USERNAME].
-Please make sure the current user or one of their groups is listed under the permissions of this bundle.
-For assistance, contact the owners of this project.
-They may need to redeploy the bundle to apply the new permissions.
-Please refer to https://docs.databricks.com/dev-tools/bundles/permissions.html for more on managing permissions.
-Exit code: 1
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/scripts/default/files...
+Deploying resources...
+Deployment complete!
+Executing 'postdeploy' script
+from myscript.py 0 postdeploy: hello stdout!
+from myscript.py 0 postdeploy: hello stderr!

View File

@ -1 +0,0 @@
LocalOnly = true # Deployment currently fails when run locally; once that is fixed, remove this setting

View File

@ -0,0 +1,3 @@
Error: failed to compute file content for helpers.txt.tmpl. template: :1:14: executing "" at <user_name>: error calling user_name:
Exit code: 1

View File

@ -0,0 +1 @@
$CLI bundle init .

View File

@ -0,0 +1 @@
user_name: {{ user_name }}

View File

@ -0,0 +1,7 @@
Badness = '''(minor) error message is not great: executing "" at <user_name>: error calling user_name:'''
LocalOnly = true
[[Server]]
Pattern = "GET /api/2.0/preview/scim/v2/Me"
Response.Body = '{}'
Response.StatusCode = 500

View File

@ -0,0 +1,2 @@
✨ Successfully initialized template
user_name: [USERNAME]

View File

@ -0,0 +1,3 @@
$CLI bundle init .
cat helpers.txt
rm helpers.txt

View File

@ -0,0 +1 @@
user_name: {{ user_name }}

View File

@ -0,0 +1 @@
LocalOnly = true

View File

@ -0,0 +1,2 @@
# Testing template machinery, by default there is no need to check against cloud.
LocalOnly = true

View File

@@ -3,3 +3,6 @@ trace $CLI bundle init dbt-sql --config-file ./input.json --output-dir output
 cd output/my_dbt_sql
 trace $CLI bundle validate -t dev
 trace $CLI bundle validate -t prod
+
+# Do not affect this repository's git behaviour #2318
+mv .gitignore out.gitignore

View File

@@ -3,3 +3,6 @@ trace $CLI bundle init default-python --config-file ./input.json --output-dir ou
 cd output/my_default_python
 trace $CLI bundle validate -t dev
 trace $CLI bundle validate -t prod
+
+# Do not affect this repository's git behaviour #2318
+mv .gitignore out.gitignore

View File

@@ -3,3 +3,6 @@ trace $CLI bundle init default-sql --config-file ./input.json --output-dir outpu
 cd output/my_default_sql
 trace $CLI bundle validate -t dev
 trace $CLI bundle validate -t prod
+
+# Do not affect this repository's git behaviour #2318
+mv .gitignore out.gitignore

View File

@@ -8,3 +8,6 @@ uv sync -q
 trace $CLI bundle validate -t dev --output json | jq ".resources"
 rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info
+
+# Do not affect this repository's git behaviour #2318
+mv .gitignore out.gitignore

View File

@ -1,2 +0,0 @@
# Cloud run fails with Error: failed to resolve cluster-policy: wrong-cluster-policy, err: Policy named 'wrong-cluster-policy' does not exist
LocalOnly = true

View File

@ -1,2 +0,0 @@
# Cloud run fails with Error: Path (TestResolveVariableReferences/bar/baz) doesn't start with '/'
LocalOnly = true

View File

@ -1,2 +0,0 @@
# Cloud run fails with Error: Path (TestResolveVariableReferencesToBundleVariables/bar/files) doesn't start with '/'
LocalOnly = true

View File

@ -0,0 +1,3 @@
# The tests here intend to test variable interpolation via "bundle validate".
# Even though "bundle validate" does a few API calls, that's not the focus there.
LocalOnly = true

View File

@@ -15,7 +15,10 @@ import (
 func StartCmdServer(t *testing.T) *testserver.Server {
 server := testserver.New(t)
-server.Handle("/", func(r *http.Request) (any, int) {
+// {$} is a wildcard that only matches the end of the URL. We explicitly use
+// /{$} to disambiguate it from the generic handler for '/' which is used to
+// identify unhandled API endpoints in the test server.
+server.Handle("/{$}", func(w *testserver.FakeWorkspace, r *http.Request) (any, int) {
 q := r.URL.Query()
 args := strings.Split(q.Get("args"), " ")
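Background note (not part of the commit): the "/{$}" syntax relies on Go 1.22+ net/http ServeMux routing, where a bare "/" pattern matches every path while "/{$}" matches only the root URL exactly. A minimal standalone sketch of that difference, with hypothetical handler bodies:

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	mux := http.NewServeMux()
	// Matches only "/" exactly.
	mux.HandleFunc("/{$}", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, "cmd server endpoint")
	})
	// Matches everything else; stands in for the "unhandled API endpoint" catch-all.
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		http.Error(w, "unhandled: "+r.URL.Path, http.StatusNotFound)
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	for _, path := range []string{"/", "/api/2.0/preview/scim/v2/Me"} {
		resp, err := http.Get(srv.URL + path)
		if err != nil {
			panic(err)
		}
		fmt.Println(path, resp.StatusCode) // "/" -> 200, the API path -> 404
	}
}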

View File

@@ -3,9 +3,11 @@ package acceptance_test
 import (
 "os"
 "path/filepath"
-"sync"
+"slices"
+"strings"
 "testing"
+"dario.cat/mergo"
 "github.com/BurntSushi/toml"
 "github.com/databricks/cli/libs/testdiff"
 "github.com/stretchr/testify/require"
@@ -13,11 +15,6 @@ import (
 const configFilename = "test.toml"
-var (
-configCache map[string]TestConfig
-configMutex sync.Mutex
-)
 type TestConfig struct {
 // Place to describe what's wrong with this test. Does not affect how the test is run.
 Badness string
@@ -65,58 +62,55 @@ type ServerStub struct {
 }
 }
-// FindConfig finds the closest config file.
-func FindConfig(t *testing.T, dir string) (string, bool) {
-shared := false
+// FindConfigs finds all the config relevant for this test,
+// ordered from the most outermost (at acceptance/) to current test directory (identified by dir).
+// Argument dir must be a relative path from the root of acceptance tests (<project_root>/acceptance/).
+func FindConfigs(t *testing.T, dir string) []string {
+configs := []string{}
 for {
 path := filepath.Join(dir, configFilename)
 _, err := os.Stat(path)
 if err == nil {
-return path, shared
+configs = append(configs, path)
 }
-shared = true
 if dir == "" || dir == "." {
 break
 }
-if os.IsNotExist(err) {
-dir = filepath.Dir(dir)
+dir = filepath.Dir(dir)
+if err == nil || os.IsNotExist(err) {
 continue
 }
 t.Fatalf("Error while reading %s: %s", path, err)
 }
-t.Fatal("Config not found: " + configFilename)
-return "", shared
+slices.Reverse(configs)
+return configs
 }
 // LoadConfig loads the config file. Non-leaf configs are cached.
 func LoadConfig(t *testing.T, dir string) (TestConfig, string) {
-path, leafConfig := FindConfig(t, dir)
-if leafConfig {
-return DoLoadConfig(t, path), path
+configs := FindConfigs(t, dir)
+if len(configs) == 0 {
+return TestConfig{}, "(no config)"
 }
-configMutex.Lock()
-defer configMutex.Unlock()
-if configCache == nil {
-configCache = make(map[string]TestConfig)
+result := DoLoadConfig(t, configs[0])
+for _, cfgName := range configs[1:] {
+cfg := DoLoadConfig(t, cfgName)
+err := mergo.Merge(&result, cfg, mergo.WithOverride, mergo.WithAppendSlice)
+if err != nil {
+t.Fatalf("Error during config merge: %s: %s", cfgName, err)
+}
 }
-result, ok := configCache[path]
-if ok {
-return result, path
-}
-result = DoLoadConfig(t, path)
-configCache[path] = result
-return result, path
+return result, strings.Join(configs, ", ")
 }
 func DoLoadConfig(t *testing.T, path string) TestConfig {
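Aside (not part of the commit): mergo merges each closer test.toml into the accumulated result so that scalar fields from the closer file win while slices such as the replacement list append. A rough standalone sketch under that assumption, with hypothetical field and file names:

package main

import (
	"fmt"

	"dario.cat/mergo"
)

// Repl and TestConfig are simplified stand-ins for the acceptance test config.
type Repl struct{ Old, New string }

type TestConfig struct {
	LocalOnly *bool
	Repls     []Repl
}

func main() {
	yes := true
	// Outermost config, e.g. acceptance/test.toml.
	result := TestConfig{LocalOnly: &yes, Repls: []Repl{{Old: `pid=[0-9]+`, New: "pid=12345"}}}
	// Closer config, e.g. a per-test test.toml.
	closer := TestConfig{Repls: []Repl{{Old: `\d\d:\d\d:\d\d`, New: "10:07:59"}}}

	// WithOverride lets the closer config win on set fields; WithAppendSlice appends slices.
	if err := mergo.Merge(&result, closer, mergo.WithOverride, mergo.WithAppendSlice); err != nil {
		panic(err)
	}
	fmt.Println(*result.LocalOnly, len(result.Repls)) // true 2
}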

View File

@@ -1,17 +1,23 @@
 package acceptance_test
 import (
+"bytes"
+"encoding/json"
+"fmt"
 "net/http"
+"github.com/databricks/cli/libs/testserver"
 "github.com/databricks/databricks-sdk-go/service/catalog"
+"github.com/databricks/databricks-sdk-go/service/compute"
 "github.com/databricks/databricks-sdk-go/service/iam"
-"github.com/databricks/databricks-sdk-go/service/compute"
+"github.com/databricks/databricks-sdk-go/service/jobs"
-"github.com/databricks/cli/libs/testserver"
 "github.com/databricks/databricks-sdk-go/service/workspace"
 )
 func AddHandlers(server *testserver.Server) {
-server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, int) {
+server.Handle("GET /api/2.0/policies/clusters/list", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
 return compute.ListPoliciesResponse{
 Policies: []compute.Policy{
 {
@@ -26,7 +32,7 @@ func AddHandlers(server *testserver.Server) {
 }, http.StatusOK
 })
-server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, int) {
+server.Handle("GET /api/2.0/instance-pools/list", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
 return compute.ListInstancePools{
 InstancePools: []compute.InstancePoolAndStats{
 {
@@ -37,7 +43,7 @@ func AddHandlers(server *testserver.Server) {
 }, http.StatusOK
 })
-server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, int) {
+server.Handle("GET /api/2.1/clusters/list", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
 return compute.ListClustersResponse{
 Clusters: []compute.ClusterDetails{
 {
@@ -52,31 +58,74 @@ func AddHandlers(server *testserver.Server) {
 }, http.StatusOK
 })
-server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, int) {
+server.Handle("GET /api/2.0/preview/scim/v2/Me", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
 return iam.User{
 Id: "1000012345",
 UserName: "tester@databricks.com",
 }, http.StatusOK
 })
-server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, int) {
-return workspace.ObjectInfo{
-ObjectId: 1001,
-ObjectType: "DIRECTORY",
-Path: "",
-ResourceId: "1001",
-}, http.StatusOK
+server.Handle("GET /api/2.0/workspace/get-status", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
+path := r.URL.Query().Get("path")
+return fakeWorkspace.WorkspaceGetStatus(path)
 })
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, int) { server.Handle("POST /api/2.0/workspace/mkdirs", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
request := workspace.Mkdirs{}
decoder := json.NewDecoder(r.Body)
err := decoder.Decode(&request)
if err != nil {
return internalError(err)
}
return fakeWorkspace.WorkspaceMkdirs(request)
})
server.Handle("GET /api/2.0/workspace/export", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
path := r.URL.Query().Get("path")
return fakeWorkspace.WorkspaceExport(path)
})
server.Handle("POST /api/2.0/workspace/delete", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
path := r.URL.Query().Get("path")
recursiveStr := r.URL.Query().Get("recursive")
var recursive bool
if recursiveStr == "true" {
recursive = true
} else {
recursive = false
}
return fakeWorkspace.WorkspaceDelete(path, recursive)
})
server.Handle("POST /api/2.0/workspace-files/import-file/{path}", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
path := r.PathValue("path")
body := new(bytes.Buffer)
_, err := body.ReadFrom(r.Body)
if err != nil {
return internalError(err)
}
return fakeWorkspace.WorkspaceFilesImportFile(path, body.Bytes())
})
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
return catalog.MetastoreAssignment{ return catalog.MetastoreAssignment{
DefaultCatalogName: "main", DefaultCatalogName: "main",
}, http.StatusOK }, http.StatusOK
}) })
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, int) { server.Handle("GET /api/2.0/permissions/directories/{objectId}", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
objectId := r.PathValue("objectId")
return workspace.WorkspaceObjectPermissions{ return workspace.WorkspaceObjectPermissions{
ObjectId: "1001", ObjectId: objectId,
ObjectType: "DIRECTORY", ObjectType: "DIRECTORY",
AccessControlList: []workspace.WorkspaceObjectAccessControlResponse{ AccessControlList: []workspace.WorkspaceObjectAccessControlResponse{
{ {
@ -91,8 +140,26 @@ func AddHandlers(server *testserver.Server) {
}, http.StatusOK }, http.StatusOK
}) })
server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, int) { server.Handle("POST /api/2.1/jobs/create", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
return "{}", http.StatusOK request := jobs.CreateJob{}
decoder := json.NewDecoder(r.Body)
err := decoder.Decode(&request)
if err != nil {
return internalError(err)
}
return fakeWorkspace.JobsCreate(request)
})
server.Handle("GET /api/2.1/jobs/get", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
jobId := r.URL.Query().Get("job_id")
return fakeWorkspace.JobsGet(jobId)
})
server.Handle("GET /api/2.1/jobs/list", func(fakeWorkspace *testserver.FakeWorkspace, r *http.Request) (any, int) {
return fakeWorkspace.JobsList()
}) })
server.Handle("GET /oidc/.well-known/oauth-authorization-server", func(r *http.Request) (any, int) { server.Handle("GET /oidc/.well-known/oauth-authorization-server", func(r *http.Request) (any, int) {
@ -111,3 +178,7 @@ func AddHandlers(server *testserver.Server) {
}, http.StatusOK }, http.StatusOK
}) })
} }
func internalError(err error) (any, int) {
return fmt.Errorf("internal error: %w", err), http.StatusInternalServerError
}

View File

@ -1,2 +0,0 @@
# If test directory nor any of its parents do not have test.toml then this file serves as fallback configuration.
# The configurations are not merged across parents; the closest one is used fully.

View File

@@ -1 +1 @@
-{"headers":{"Authorization":"Bearer dapi1234","User-Agent":"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/jobs_create cmd-exec-id/[UUID] auth/pat"},"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}
+{"headers":{"Authorization":["Bearer [DATABRICKS_TOKEN]"],"User-Agent":["cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/jobs_create cmd-exec-id/[UUID] auth/pat"]},"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}

View File

@@ -29,7 +29,7 @@ func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics
 // We intentionally ignore the error because it is not critical to the deployment
 err := client.Delete(ctx, ".", filer.DeleteRecursively)
 if err != nil {
-log.Errorf(ctx, "failed to delete %s: %v", uploadPath, err)
+log.Debugf(ctx, "failed to delete %s: %v", uploadPath, err)
 }
 err = client.Mkdir(ctx, ".")

View File

@@ -13,7 +13,6 @@ var (
 func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) {
 value := make(map[string]dyn.Value)
 if job.Settings.Tasks != nil {
-
 tasks := make([]dyn.Value, 0)
 for _, task := range job.Settings.Tasks {

View File

@@ -84,7 +84,7 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
 // Pipelines presets: Prefix, PipelinesDevelopment
 for key, p := range r.Pipelines {
-if p.PipelineSpec == nil {
+if p.CreatePipeline == nil {
 diags = diags.Extend(diag.Errorf("pipeline %s is not defined", key))
 continue
 }

View File

@@ -56,7 +56,7 @@ func resolveVolume(v *resources.Volume, b *bundle.Bundle) {
 }
 func resolvePipelineSchema(p *resources.Pipeline, b *bundle.Bundle) {
-if p == nil || p.PipelineSpec == nil {
+if p == nil || p.CreatePipeline == nil {
 return
 }
 if p.Schema == "" {
@@ -71,7 +71,7 @@ func resolvePipelineSchema(p *resources.Pipeline, b *bundle.Bundle) {
 }
 func resolvePipelineTarget(p *resources.Pipeline, b *bundle.Bundle) {
-if p == nil || p.PipelineSpec == nil {
+if p == nil || p.CreatePipeline == nil {
 return
 }
 if p.Target == "" {

View File

@@ -118,43 +118,43 @@ func TestCaptureSchemaDependencyForPipelinesWithTarget(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline1": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Schema: "foobar",
 },
 },
 "pipeline2": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog2",
 Schema: "foobar",
 },
 },
 "pipeline3": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Schema: "barfoo",
 },
 },
 "pipeline4": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalogX",
 Schema: "foobar",
 },
 },
 "pipeline5": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Schema: "schemaX",
 },
 },
 "pipeline6": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "",
 Schema: "foobar",
 },
 },
 "pipeline7": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "",
 Schema: "",
 Name: "whatever",
@@ -179,7 +179,7 @@ func TestCaptureSchemaDependencyForPipelinesWithTarget(t *testing.T) {
 assert.Equal(t, "", b.Config.Resources.Pipelines["pipeline7"].Schema)
 assert.Nil(t, b.Config.Resources.Pipelines["nilPipeline"])
-assert.Nil(t, b.Config.Resources.Pipelines["emptyPipeline"].PipelineSpec)
+assert.Nil(t, b.Config.Resources.Pipelines["emptyPipeline"].CreatePipeline)
 for _, k := range []string{"pipeline1", "pipeline2", "pipeline3", "pipeline4", "pipeline5", "pipeline6", "pipeline7"} {
 assert.Empty(t, b.Config.Resources.Pipelines[k].Target)
@@ -214,43 +214,43 @@ func TestCaptureSchemaDependencyForPipelinesWithSchema(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline1": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Target: "foobar",
 },
 },
 "pipeline2": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog2",
 Target: "foobar",
 },
 },
 "pipeline3": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Target: "barfoo",
 },
 },
 "pipeline4": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalogX",
 Target: "foobar",
 },
 },
 "pipeline5": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "catalog1",
 Target: "schemaX",
 },
 },
 "pipeline6": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "",
 Target: "foobar",
 },
 },
 "pipeline7": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Catalog: "",
 Target: "",
 Name: "whatever",

View File

@@ -47,7 +47,7 @@ func TestExpandGlobPathsInPipelines(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{

View File

@@ -31,8 +31,8 @@ func TestInitializeURLs(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline1": {
 ID: "3",
-PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"},
+CreatePipeline: &pipelines.CreatePipeline{Name: "pipeline1"},
 },
 },
 Experiments: map[string]*resources.MlflowExperiment{

View File

@@ -19,7 +19,7 @@ func TestMergePipelineClusters(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "foo": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Clusters: []pipelines.PipelineCluster{
 {
 NodeTypeId: "i3.xlarge",
@@ -68,7 +68,7 @@ func TestMergePipelineClustersCaseInsensitive(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "foo": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Clusters: []pipelines.PipelineCluster{
 {
 Label: "default",

View File

@@ -88,7 +88,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
 },
 },
 Pipelines: map[string]*resources.Pipeline{
-"pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1", Continuous: true}},
+"pipeline1": {CreatePipeline: &pipelines.CreatePipeline{Name: "pipeline1", Continuous: true}},
 },
 Experiments: map[string]*resources.MlflowExperiment{
 "experiment1": {Experiment: &ml.Experiment{Name: "/Users/lennart.kats@databricks.com/experiment1"}},
@@ -181,7 +181,7 @@ func TestProcessTargetModeDevelopment(t *testing.T) {
 // Pipeline 1
 assert.Equal(t, "[dev lennart] pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
 assert.False(t, b.Config.Resources.Pipelines["pipeline1"].Continuous)
-assert.True(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
+assert.True(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)
 // Experiment 1
 assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev lennart] experiment1", b.Config.Resources.Experiments["experiment1"].Name)
@@ -316,7 +316,7 @@ func TestProcessTargetModeDefault(t *testing.T) {
 require.NoError(t, diags.Error())
 assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
 assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
-assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
+assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)
 assert.Equal(t, "servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name)
 assert.Equal(t, "registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name)
 assert.Equal(t, "qualityMonitor1", b.Config.Resources.QualityMonitors["qualityMonitor1"].TableName)
@@ -362,7 +362,7 @@ func TestProcessTargetModeProduction(t *testing.T) {
 assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
 assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
-assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
+assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)
 assert.Equal(t, "servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name)
 assert.Equal(t, "registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name)
 assert.Equal(t, "qualityMonitor1", b.Config.Resources.QualityMonitors["qualityMonitor1"].TableName)
@@ -568,5 +568,5 @@ func TestPipelinesDevelopmentDisabled(t *testing.T) {
 diags := bundle.Apply(context.Background(), b, m)
 require.NoError(t, diags.Error())
-assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
+assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)
 }

View File

@@ -20,7 +20,7 @@ func TestResolveVariableReferencesWithSourceLinkedDeployment(t *testing.T) {
 true,
 func(t *testing.T, b *bundle.Bundle) {
 // Variables that use workspace file path should have SyncRootValue during resolution phase
-require.Equal(t, "sync/root/path", b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Configuration["source"])
+require.Equal(t, "sync/root/path", b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Configuration["source"])
 // The file path itself should remain the same
 require.Equal(t, "file/path", b.Config.Workspace.FilePath)
@@ -29,7 +29,7 @@ func TestResolveVariableReferencesWithSourceLinkedDeployment(t *testing.T) {
 {
 false,
 func(t *testing.T, b *bundle.Bundle) {
-require.Equal(t, "file/path", b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Configuration["source"])
+require.Equal(t, "file/path", b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Configuration["source"])
 require.Equal(t, "file/path", b.Config.Workspace.FilePath)
 },
 },
@@ -48,7 +48,7 @@ func TestResolveVariableReferencesWithSourceLinkedDeployment(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline1": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Configuration: map[string]string{
 "source": "${workspace.file_path}",
 },

View File

@@ -179,7 +179,7 @@ func TestTranslatePaths(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{
@@ -333,7 +333,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 File: &pipelines.FileLibrary{
@@ -488,7 +488,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{
@@ -532,7 +532,7 @@ func TestPipelineNotebookDoesNotExistErrorWithoutExtension(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{
@@ -572,7 +572,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 File: &pipelines.FileLibrary{
@@ -677,7 +677,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{
@@ -712,7 +712,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
 Resources: config.Resources{
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 File: &pipelines.FileLibrary{
@@ -916,7 +916,7 @@ func TestTranslatePathsWithSourceLinkedDeployment(t *testing.T) {
 },
 Pipelines: map[string]*resources.Pipeline{
 "pipeline": {
-PipelineSpec: &pipelines.PipelineSpec{
+CreatePipeline: &pipelines.CreatePipeline{
 Libraries: []pipelines.PipelineLibrary{
 {
 Notebook: &pipelines.NotebookLibrary{


@@ -16,7 +16,7 @@ type Pipeline struct {
ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
URL string `json:"url,omitempty" bundle:"internal"`
- *pipelines.PipelineSpec
+ *pipelines.CreatePipeline
}
func (s *Pipeline) UnmarshalJSON(b []byte) error {
@@ -59,5 +59,5 @@ func (s *Pipeline) GetURL() string {
}
func (s *Pipeline) IsNil() bool {
- return s.PipelineSpec == nil
+ return s.CreatePipeline == nil
}


@@ -238,7 +238,7 @@ func TestValidateSingleNodeClusterFailForPipelineClusters(t *testing.T) {
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"foo": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Clusters: []pipelines.PipelineCluster{
{
SparkConf: tc.sparkConf,
@@ -493,7 +493,7 @@ func TestValidateSingleNodeClusterPassPipelineClusters(t *testing.T) {
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"foo": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Clusters: []pipelines.PipelineCluster{
{
SparkConf: tc.sparkConf,


@@ -20,11 +20,11 @@ func (m *annotatePipelines) Name() string {
func (m *annotatePipelines) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
for _, pipeline := range b.Config.Resources.Pipelines {
- if pipeline.PipelineSpec == nil {
+ if pipeline.CreatePipeline == nil {
continue
}
- pipeline.PipelineSpec.Deployment = &pipelines.PipelineDeployment{
+ pipeline.CreatePipeline.Deployment = &pipelines.PipelineDeployment{
Kind: pipelines.DeploymentKindBundle,
MetadataFilePath: metadataFilePath(b),
}


@@ -21,12 +21,12 @@ func TestAnnotatePipelinesMutator(t *testing.T) {
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"my-pipeline-1": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "My Pipeline One",
},
},
"my-pipeline-2": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "My Pipeline Two",
},
},
@@ -43,14 +43,14 @@ func TestAnnotatePipelinesMutator(t *testing.T) {
Kind: pipelines.DeploymentKindBundle,
MetadataFilePath: "/a/b/c/metadata.json",
},
- b.Config.Resources.Pipelines["my-pipeline-1"].PipelineSpec.Deployment)
+ b.Config.Resources.Pipelines["my-pipeline-1"].CreatePipeline.Deployment)
assert.Equal(t,
&pipelines.PipelineDeployment{
Kind: pipelines.DeploymentKindBundle,
MetadataFilePath: "/a/b/c/metadata.json",
},
- b.Config.Resources.Pipelines["my-pipeline-2"].PipelineSpec.Deployment)
+ b.Config.Resources.Pipelines["my-pipeline-2"].CreatePipeline.Deployment)
}
func TestAnnotatePipelinesMutatorPipelineWithoutASpec(t *testing.T) {


@@ -203,7 +203,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
func TestBundleToTerraformPipeline(t *testing.T) {
src := resources.Pipeline{
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "my pipeline",
Libraries: []pipelines.PipelineLibrary{
{
@@ -759,7 +759,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"test_pipeline": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "test_pipeline",
},
},
@@ -898,12 +898,12 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"test_pipeline": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "test_pipeline",
},
},
"test_pipeline_new": {
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "test_pipeline_new",
},
},


@@ -21,6 +21,11 @@ func convertPipelineResource(ctx context.Context, vin dyn.Value) (dyn.Value, err
return dyn.InvalidValue, err
}
+ vout, err = dyn.DropKeys(vout, []string{"allow_duplicate_names", "dry_run"})
+ if err != nil {
+ return dyn.InvalidValue, err
+ }
// Normalize the output value to the target schema.
vout, diags := convert.Normalize(schema.ResourcePipeline{}, vout)
for _, diag := range diags {
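As an aside, here is a minimal sketch of what this key-dropping step does, using only the `dyn` constructors that appear elsewhere in this diff (`dyn.NewValue`, `dyn.V`) and the `dyn.DropKeys` helper called above; the field values are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// A small dynamic value resembling a pipeline resource.
	v := dyn.NewValue(map[string]dyn.Value{
		"name":                  dyn.V("my pipeline"),
		"allow_duplicate_names": dyn.V(true),
		"dry_run":               dyn.V(true),
	}, nil)

	// Drop the fields the Terraform schema does not accept, mirroring the
	// convertPipelineResource change above.
	out, err := dyn.DropKeys(v, []string{"allow_duplicate_names", "dry_run"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // expected to retain only the "name" key
}
```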


@@ -15,8 +15,17 @@ import (
func TestConvertPipeline(t *testing.T) {
src := resources.Pipeline{
- PipelineSpec: &pipelines.PipelineSpec{
+ CreatePipeline: &pipelines.CreatePipeline{
Name: "my pipeline",
+ // This fields is not part of TF schema yet, but once we upgrade to TF version that supports it, this test will fail because run_as
+ // will be exposed which is expected and test will need to be updated.
+ RunAs: &pipelines.RunAs{
+ UserName: "foo@bar.com",
+ },
+ // We expect AllowDuplicateNames and DryRun to be ignored and not passed to the TF output.
+ // This is not supported by TF now, so we don't want to expose it.
+ AllowDuplicateNames: true,
+ DryRun: true,
Libraries: []pipelines.PipelineLibrary{
{
Notebook: &pipelines.NotebookLibrary{


@@ -130,6 +130,6 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
s.MarkdownDescription = a.MarkdownDescription
}
if a.MarkdownExamples != "" {
- s.Examples = []any{a.MarkdownExamples}
+ s.Examples = []string{a.MarkdownExamples}
}
}


@@ -220,9 +220,9 @@ func isCycleField(field string) bool {
}
func getExample(v *jsonschema.Schema) string {
- examples := v.Examples
+ examples := getExamples(v.Examples)
if len(examples) == 0 {
return ""
}
- return examples[0].(string)
+ return examples[0]
}


@@ -58,7 +58,7 @@ func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *j
node := s
description := s.Description
markdownDescription := s.MarkdownDescription
- examples := s.Examples
+ examples := getExamples(s.Examples)
for node.Reference != nil {
ref := getRefType(node)
@@ -75,7 +75,7 @@ func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *j
markdownDescription = newNode.MarkdownDescription
}
if len(examples) == 0 {
- examples = newNode.Examples
+ examples = getExamples(newNode.Examples)
}
node = newNode
@@ -89,6 +89,14 @@ func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *j
return &newNode
}
+ func getExamples(examples any) []string {
+ typedExamples, ok := examples.([]string)
+ if !ok {
+ return []string{}
+ }
+ return typedExamples
+ }
func getRefType(node *jsonschema.Schema) string {
if node.Reference == nil {
return ""
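The new helper relies on a plain type assertion, so anything other than a `[]string` (including a `[]any` that merely holds strings) yields an empty slice, which is why `assignAnnotation` above now stores `[]string`. A self-contained sketch of that behavior:

```go
package main

import "fmt"

// getExamples mirrors the helper added in this hunk: it only accepts a value
// that is already a []string and falls back to an empty slice otherwise.
func getExamples(examples any) []string {
	typedExamples, ok := examples.([]string)
	if !ok {
		return []string{}
	}
	return typedExamples
}

func main() {
	fmt.Println(getExamples([]string{"a", "b"})) // [a b]
	fmt.Println(getExamples([]any{"a", "b"}))    // [] - the assertion fails for []any
	fmt.Println(getExamples(nil))                // []
}
```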


@@ -414,6 +414,16 @@ github.com/databricks/cli/bundle/config/resources.Permission:
"user_name":
"description": |-
The name of the user that has the permission set in level.
+ github.com/databricks/cli/bundle/config/resources.Pipeline:
+ "allow_duplicate_names":
+ "description": |-
+ PLACEHOLDER
+ "dry_run":
+ "description": |-
+ PLACEHOLDER
+ "run_as":
+ "description": |-
+ PLACEHOLDER
github.com/databricks/cli/bundle/config/variable.Lookup:
"alert":
"description": |-


@@ -371,6 +371,9 @@ github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
"description": |-
Tags to be attached to the serving endpoint and automatically propagated to billing logs.
github.com/databricks/cli/bundle/config/resources.Pipeline:
+ "allow_duplicate_names":
+ "description": |-
+ If false, deployment will fail if name conflicts with that of another pipeline.
"budget_policy_id":
"description": |-
Budget policy of this pipeline.
@@ -395,6 +398,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
"development":
"description": |-
Whether the pipeline is in Development mode. Defaults to false.
+ "dry_run": {}
"edition":
"description": |-
Pipeline product edition.
@@ -425,6 +429,7 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
"restart_window":
"description": |-
Restart window of this pipeline.
+ "run_as": {}
"schema":
"description": |-
The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.
@@ -2624,6 +2629,18 @@ github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindow:
"description": |-
Time zone id of restart window. See https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html for details.
If not specified, UTC will be used.
+ github.com/databricks/databricks-sdk-go/service/pipelines.RunAs:
+ "_":
+ "description": |-
+ Write-only setting, available only in Create/Update calls. Specifies the user or service principal that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+ Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is thrown.
+ "service_principal_name":
+ "description": |-
+ Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.
+ "user_name":
+ "description": |-
+ The email of an active workspace user. Users can only set this field to their own email.
github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec:
"destination_catalog":
"description": |-


@@ -60,7 +60,6 @@ github.com/databricks/cli/bundle/config/resources.Cluster:
"_":
"markdown_description": |-
The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
"markdown_examples": |-
The following example creates a cluster named `my_cluster` and sets that as the cluster to use to run the notebook in `my_job`:
@@ -123,7 +122,6 @@ github.com/databricks/cli/bundle/config/resources.Dashboard:
If you use the UI to modify the dashboard, modifications made through the UI are not applied to the dashboard JSON file in the local bundle unless you explicitly update it using `bundle generate`. You can use the `--watch` option to continuously poll and retrieve changes to the dashboard. See [_](/dev-tools/cli/bundle-commands.md#generate).
In addition, if you attempt to deploy a bundle that contains a dashboard JSON file that is different than the one in the remote workspace, an error will occur. To force the deploy and overwrite the dashboard in the remote workspace with the local one, use the `--force` option. See [_](/dev-tools/cli/bundle-commands.md#deploy).
"embed_credentials":
"description": |-
PLACEHOLDER
@@ -241,9 +239,15 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
- notebook:
path: ./pipeline.py
```
+ "dry_run":
+ "description": |-
+ PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
+ "run_as":
+ "description": |-
+ PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.QualityMonitor:
"_":
"markdown_description": |-
@@ -356,7 +360,6 @@ github.com/databricks/cli/bundle/config/resources.Volume:
- A volume cannot be referenced in the `artifact_path` for the bundle until it exists in the workspace. Hence, if you want to use <DABS> to create the volume, you must first define the volume in the bundle, deploy it to create the volume, then reference it in the `artifact_path` in subsequent deployments.
- Volumes in the bundle are not prepended with the `dev_${workspace.current_user.short_name}` prefix when the deployment target has `mode: development` configured. However, you can manually configure this prefix. See [_](/dev-tools/bundles/deployment-modes.md#custom-presets).
"markdown_examples": |-
The following example creates a <UC> volume with the key `my_volume`:
@@ -376,6 +379,42 @@ github.com/databricks/cli/bundle/config/resources.Volume:
"volume_type":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"job":
"description": |-
@@ -389,6 +428,49 @@ github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"sql_warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state": {}
github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes:
"availability":
"description": |-
@@ -473,85 +555,6 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
"manual":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
"entity_version":
"description": |-


@@ -109,6 +109,20 @@ func removeJobsFields(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
return s
}
func removePipelineFields(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
switch typ {
case reflect.TypeOf(resources.Pipeline{}):
// Even though DABs supports this field, TF provider does not. Thus, we
// should not expose it to the user.
delete(s.Properties, "dry_run")
delete(s.Properties, "allow_duplicate_names")
default:
// Do nothing
}
return s
}
// While volume_type is required in the volume create API, DABs automatically sets
// it's value to "MANAGED" if it's not provided. Thus, we make it optional
// in the bundle schema.
@@ -168,6 +182,7 @@ func generateSchema(workdir, outputFile string) {
// Generate the JSON schema from the bundle Go struct.
s, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
removeJobsFields,
+ removePipelineFields,
makeVolumeTypeOptional,
a.addAnnotations,
addInterpolationPatterns,
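For readers new to this generator: each entry in that slice is a transform from `(reflect.Type, jsonschema.Schema)` to `jsonschema.Schema`, applied while the bundle schema is generated. Below is a minimal, self-contained sketch of the same pattern, with stand-in types rather than the real `jsonschema` and `resources` packages:

```go
package main

import (
	"fmt"
	"reflect"
)

// Schema is a stand-in for github.com/databricks/cli/libs/jsonschema.Schema.
type Schema struct {
	Properties map[string]Schema
}

// Pipeline is a stand-in for resources.Pipeline.
type Pipeline struct{}

// removeFields has the same shape as removePipelineFields above: a transform
// from (reflect.Type, Schema) to Schema that prunes unsupported properties.
func removeFields(typ reflect.Type, s Schema) Schema {
	if typ == reflect.TypeOf(Pipeline{}) {
		delete(s.Properties, "dry_run")
		delete(s.Properties, "allow_duplicate_names")
	}
	return s
}

func main() {
	s := Schema{Properties: map[string]Schema{
		"name":                  {},
		"dry_run":               {},
		"allow_duplicate_names": {},
	}}
	s = removeFields(reflect.TypeOf(Pipeline{}), s)
	fmt.Println(len(s.Properties)) // 1: only "name" survives
}
```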


@@ -1,6 +1,7 @@
package main
import (
+ "bytes"
"encoding/json"
"fmt"
"os"
@@ -9,8 +10,9 @@ import (
"strings"
"github.com/databricks/cli/bundle/internal/annotation"
+ "github.com/databricks/cli/libs/dyn/convert"
+ "github.com/databricks/cli/libs/dyn/yamlloader"
"github.com/databricks/cli/libs/jsonschema"
- "gopkg.in/yaml.v3"
)
type Components struct {
@@ -122,7 +124,11 @@ func (p *openapiParser) extractAnnotations(typ reflect.Type, outputPath, overrid
if err != nil {
return err
}
- err = yaml.Unmarshal(b, &overrides)
+ overridesDyn, err := yamlloader.LoadYAML(overridesPath, bytes.NewBuffer(b))
+ if err != nil {
+ return err
+ }
+ err = convert.ToTyped(&overrides, overridesDyn)
if err != nil {
return err
}


@@ -92,7 +92,7 @@ func expandLibraries(b *bundle.Bundle, p dyn.Path, v dyn.Value) (diag.Diagnostic
for _, match := range matches {
output = append(output, dyn.NewValue(map[string]dyn.Value{
- libType: dyn.V(match),
+ libType: dyn.NewValue(match, lib.Locations()),
}, lib.Locations()))
}
}
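The switch from `dyn.V(match)` to `dyn.NewValue(match, lib.Locations())` keeps the source locations of the original glob entry on each expanded library, so later diagnostics can point back at the declaring line in the YAML. A small sketch, assuming only the `dyn` constructors and the `Location()` accessor already used in this diff:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Pretend this is where the glob pattern was declared in databricks.yml.
	loc := dyn.Location{File: "databricks.yml", Line: 42, Column: 9}

	// dyn.V attaches no location to the expanded entry ...
	without := dyn.V("dist/lib-0.1.0.whl")
	// ... while dyn.NewValue carries the declaring location along.
	with := dyn.NewValue("dist/lib-0.1.0.whl", []dyn.Location{loc})

	fmt.Println(without.Location()) // empty location
	fmt.Println(with.Location())    // location of the YAML declaration
}
```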


@@ -0,0 +1,97 @@
package libraries
import (
"context"
"path/filepath"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
)
type checkForSameNameLibraries struct{}
var patterns = []dyn.Pattern{
taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
envDepsPattern.Append(dyn.AnyIndex()),
}
type libData struct {
fullPath string
locations []dyn.Location
paths []dyn.Path
}
func (c checkForSameNameLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics
libs := make(map[string]*libData)
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
var err error
for _, pattern := range patterns {
v, err = dyn.MapByPattern(v, pattern, func(p dyn.Path, lv dyn.Value) (dyn.Value, error) {
libPath := lv.MustString()
// If not local library, skip the check
if !IsLibraryLocal(libPath) {
return lv, nil
}
libFullPath := lv.MustString()
lib := filepath.Base(libFullPath)
// If the same basename was seen already but full path is different
// then it's a duplicate. Add the location to the location list.
lp, ok := libs[lib]
if !ok {
libs[lib] = &libData{
fullPath: libFullPath,
locations: []dyn.Location{lv.Location()},
paths: []dyn.Path{p},
}
} else if lp.fullPath != libFullPath {
lp.locations = append(lp.locations, lv.Location())
lp.paths = append(lp.paths, p)
}
return lv, nil
})
if err != nil {
return dyn.InvalidValue, err
}
}
if err != nil {
return dyn.InvalidValue, err
}
return v, nil
})
// Iterate over all the libraries and check if there are any duplicates.
// Duplicates will have more than one location.
// If there are duplicates, add a diagnostic.
for lib, lv := range libs {
if len(lv.locations) > 1 {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "Duplicate local library name " + lib,
Detail: "Local library names must be unique",
Locations: lv.locations,
Paths: lv.paths,
})
}
}
if err != nil {
diags = diags.Extend(diag.FromErr(err))
}
return diags
}
func (c checkForSameNameLibraries) Name() string {
return "CheckForSameNameLibraries"
}
func CheckForSameNameLibraries() bundle.Mutator {
return checkForSameNameLibraries{}
}
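The mutator keys libraries by base file name, so two wheels with the same name built from different paths collide, while the same artifact referenced twice does not. A stripped-down sketch of that grouping logic using only the standard library (the paths are made up):

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	libs := []string{
		"full/path/test.whl",
		"other/path/test.whl", // same base name, different path: duplicate
		"full/path/test.whl",  // exact same path: not a duplicate
		"dist/unique-0.1.0.whl",
	}

	seen := map[string]string{} // base name -> first full path seen
	for _, lib := range libs {
		base := filepath.Base(lib)
		if prev, ok := seen[base]; ok && prev != lib {
			fmt.Printf("Duplicate local library name %s (%s vs %s)\n", base, prev, lib)
			continue
		}
		seen[base] = lib
	}
}
```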


@@ -0,0 +1,121 @@
package libraries
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/require"
)
func TestSameNameLibraries(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test": {
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{
Libraries: []compute.Library{
{
Whl: "full/path/test.whl",
},
},
},
{
Libraries: []compute.Library{
{
Whl: "other/path/test.whl",
},
},
},
},
},
},
},
},
},
}
bundletest.SetLocation(b, "resources.jobs.test.tasks[0]", []dyn.Location{
{File: "databricks.yml", Line: 10, Column: 1},
})
bundletest.SetLocation(b, "resources.jobs.test.tasks[1]", []dyn.Location{
{File: "databricks.yml", Line: 20, Column: 1},
})
diags := bundle.Apply(context.Background(), b, CheckForSameNameLibraries())
require.Len(t, diags, 1)
require.Equal(t, diag.Error, diags[0].Severity)
require.Equal(t, "Duplicate local library name test.whl", diags[0].Summary)
require.Equal(t, []dyn.Location{
{File: "databricks.yml", Line: 10, Column: 1},
{File: "databricks.yml", Line: 20, Column: 1},
}, diags[0].Locations)
paths := make([]string, 0)
for _, p := range diags[0].Paths {
paths = append(paths, p.String())
}
require.Equal(t, []string{
"resources.jobs.test.tasks[0].libraries[0].whl",
"resources.jobs.test.tasks[1].libraries[0].whl",
}, paths)
}
func TestSameNameLibrariesWithUniqueLibraries(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test": {
JobSettings: &jobs.JobSettings{
Tasks: []jobs.Task{
{
Libraries: []compute.Library{
{
Whl: "full/path/test-0.1.1.whl",
},
{
Whl: "cowsay",
},
},
},
{
Libraries: []compute.Library{
{
Whl: "other/path/test-0.1.0.whl",
},
{
Whl: "cowsay",
},
},
},
{
Libraries: []compute.Library{
{
Whl: "full/path/test-0.1.1.whl", // Use the same library as the first task
},
},
},
},
},
},
},
},
},
}
diags := bundle.Apply(context.Background(), b, CheckForSameNameLibraries())
require.Empty(t, diags)
}


@@ -42,7 +42,7 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics {
// such that they are not logged multiple times.
// If this is done, we can omit this block.
if err := diags.Error(); err != nil {
- log.Errorf(ctx, "Error: %s", err)
+ log.Debugf(ctx, "Error: %s", err)
}
return diags


@@ -22,7 +22,7 @@ func ApplyReadOnly(ctx context.Context, rb ReadOnlyBundle, m ReadOnlyMutator) di
log.Debugf(ctx, "ApplyReadOnly")
diags := m.Apply(ctx, rb)
if err := diags.Error(); err != nil {
- log.Errorf(ctx, "Error: %s", err)
+ log.Debugf(ctx, "Error: %s", err)
}
return diags


@@ -38,8 +38,8 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) {
"job_2": {JobSettings: &jobs.JobSettings{Name: "job_2"}},
},
Pipelines: map[string]*resources.Pipeline{
- "pipeline_1": {PipelineSpec: &pipelines.PipelineSpec{}},
- "pipeline_2": {PipelineSpec: &pipelines.PipelineSpec{}},
+ "pipeline_1": {CreatePipeline: &pipelines.CreatePipeline{}},
+ "pipeline_2": {CreatePipeline: &pipelines.CreatePipeline{}},
},
Models: map[string]*resources.MlflowModel{
"model_1": {Model: &ml.Model{}},
@@ -98,8 +98,8 @@ func TestApplyWorkspaceRootPermissionsForAllPaths(t *testing.T) {
"job_2": {JobSettings: &jobs.JobSettings{Name: "job_2"}},
},
Pipelines: map[string]*resources.Pipeline{
- "pipeline_1": {PipelineSpec: &pipelines.PipelineSpec{}},
- "pipeline_2": {PipelineSpec: &pipelines.PipelineSpec{}},
+ "pipeline_1": {CreatePipeline: &pipelines.CreatePipeline{}},
+ "pipeline_2": {CreatePipeline: &pipelines.CreatePipeline{}},
},
Models: map[string]*resources.MlflowModel{
"model_1": {Model: &ml.Model{}},


@@ -155,6 +155,11 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
mutator.ValidateGitDetails(),
artifacts.CleanUp(),
libraries.ExpandGlobReferences(),
+ // libraries.CheckForSameNameLibraries() needs to be run after we expand glob references so we
+ // know what are the actual library paths.
+ // libraries.ExpandGlobReferences() has to be run after the libraries are built and thus this
+ // mutator is part of the deploy step rather than validate.
+ libraries.CheckForSameNameLibraries(),
libraries.Upload(),
trampoline.TransformWheelTask(),
files.Upload(outputHandler),


@@ -530,12 +530,12 @@ func TestRenderSummary(t *testing.T) {
"pipeline2": {
ID: "4",
// no URL
- PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline2-name"},
+ CreatePipeline: &pipelines.CreatePipeline{Name: "pipeline2-name"},
},
"pipeline1": {
ID: "3",
URL: "https://url3",
- PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1-name"},
+ CreatePipeline: &pipelines.CreatePipeline{Name: "pipeline1-name"},
},
},
Schemas: map[string]*resources.Schema{


@@ -25,7 +25,7 @@ func TestCompletions_SkipDuplicates(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"foo": {
- PipelineSpec: &pipelines.PipelineSpec{},
+ CreatePipeline: &pipelines.CreatePipeline{},
},
},
},
@@ -50,7 +50,7 @@ func TestCompletions_Filter(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"bar": {
- PipelineSpec: &pipelines.PipelineSpec{},
+ CreatePipeline: &pipelines.CreatePipeline{},
},
},
},


@@ -56,7 +56,7 @@ func TestLookup_MultipleFound(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"foo": {
- PipelineSpec: &pipelines.PipelineSpec{},
+ CreatePipeline: &pipelines.CreatePipeline{},
},
},
},
@@ -107,7 +107,7 @@ func TestLookup_NominalWithFilters(t *testing.T) {
},
Pipelines: map[string]*resources.Pipeline{
"bar": {
- PipelineSpec: &pipelines.PipelineSpec{},
+ CreatePipeline: &pipelines.CreatePipeline{},
},
},
},

Some files were not shown because too many files have changed in this diff.