Merge branch 'main' into denik/wheel-patch

This commit is contained in:
Denis Bilenko 2025-03-05 16:08:20 +01:00 committed by GitHub
commit 9568ce19ac
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 139 additions and 232 deletions

View File

@ -145,7 +145,10 @@ jobs:
go run main.go bundle schema > schema.json go run main.go bundle schema > schema.json
# Add markdownDescription keyword to ajv # Add markdownDescription keyword to ajv
echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js echo "module.exports = function(a) {
a.addKeyword('markdownDescription');
a.addKeyword('deprecationMessage');
}" >> keywords.js
for file in ./bundle/internal/schema/testdata/pass/*.yml; do for file in ./bundle/internal/schema/testdata/pass/*.yml; do
ajv test -s schema.json -d $file --valid -c=./keywords.js ajv test -s schema.json -d $file --valid -c=./keywords.js

View File

@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318 # Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore mv .gitignore out.gitignore
rm .databricks/.gitignore

View File

@ -48,7 +48,7 @@
- catalog: main - catalog: main
+ ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog: + ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
+ # catalog: catalog_name + # catalog: catalog_name
target: my_default_python_${bundle.target} schema: my_default_python_${bundle.target}
- serverless: true - serverless: true
libraries: libraries:
- notebook: - notebook:

View File

@ -5,7 +5,7 @@ resources:
name: my_default_python_pipeline name: my_default_python_pipeline
## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog: ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
# catalog: catalog_name # catalog: catalog_name
target: my_default_python_${bundle.target} schema: my_default_python_${bundle.target}
libraries: libraries:
- notebook: - notebook:
path: ../src/dlt_pipeline.ipynb path: ../src/dlt_pipeline.ipynb

View File

@ -6,6 +6,7 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318 # Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore mv .gitignore out.gitignore
rm .databricks/.gitignore
cd ../../ cd ../../

View File

@ -18,5 +18,5 @@ See also the documentation at https://docs.databricks.com/dev-tools/bundles/inde
- ## Catalog is required for serverless compute - ## Catalog is required for serverless compute
- catalog: main - catalog: main
+ catalog: customcatalog + catalog: customcatalog
target: my_default_python_${bundle.target} schema: my_default_python_${bundle.target}
serverless: true serverless: true

View File

@ -5,7 +5,7 @@ resources:
name: my_default_python_pipeline name: my_default_python_pipeline
## Catalog is required for serverless compute ## Catalog is required for serverless compute
catalog: main catalog: main
target: my_default_python_${bundle.target} schema: my_default_python_${bundle.target}
serverless: true serverless: true
libraries: libraries:
- notebook: - notebook:

View File

@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318 # Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore mv .gitignore out.gitignore
rm .databricks/.gitignore

View File

@ -6,3 +6,6 @@ trace $CLI bundle validate -t prod
# Do not affect this repository's git behaviour #2318 # Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore mv .gitignore out.gitignore
# Only for this test (default-sql), record .databricks/.gitignore in the output
mv .databricks/.gitignore .databricks/out.gitignore

View File

@ -11,3 +11,4 @@ rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info
# Do not affect this repository's git behaviour #2318 # Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore mv .gitignore out.gitignore
rm .databricks/.gitignore

View File

@ -21,6 +21,7 @@ import (
"github.com/databricks/cli/libs/fileset" "github.com/databricks/cli/libs/fileset"
"github.com/databricks/cli/libs/locker" "github.com/databricks/cli/libs/locker"
"github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/log"
libsync "github.com/databricks/cli/libs/sync"
"github.com/databricks/cli/libs/tags" "github.com/databricks/cli/libs/tags"
"github.com/databricks/cli/libs/terraform" "github.com/databricks/cli/libs/terraform"
"github.com/databricks/cli/libs/vfs" "github.com/databricks/cli/libs/vfs"
@ -198,6 +199,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error)
return "", err return "", err
} }
libsync.WriteGitIgnore(ctx, b.BundleRootPath)
return dir, nil return dir, nil
} }

View File

@ -52,7 +52,7 @@ type Root struct {
Targets map[string]*Target `json:"targets,omitempty"` Targets map[string]*Target `json:"targets,omitempty"`
// DEPRECATED. Left for backward compatibility with Targets // DEPRECATED. Left for backward compatibility with Targets
Environments map[string]*Target `json:"environments,omitempty" bundle:"deprecated"` Environments map[string]*Target `json:"environments,omitempty"`
// Sync section specifies options for files synchronization // Sync section specifies options for files synchronization
Sync Sync `json:"sync,omitempty"` Sync Sync `json:"sync,omitempty"`

View File

@ -7,6 +7,7 @@ type Descriptor struct {
Default any `json:"default,omitempty"` Default any `json:"default,omitempty"`
Enum []any `json:"enum,omitempty"` Enum []any `json:"enum,omitempty"`
MarkdownExamples string `json:"markdown_examples,omitempty"` MarkdownExamples string `json:"markdown_examples,omitempty"`
DeprecationMessage string `json:"deprecation_message,omitempty"`
} }
const Placeholder = "PLACEHOLDER" const Placeholder = "PLACEHOLDER"

View File

@ -127,6 +127,12 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
if a.Default != nil { if a.Default != nil {
s.Default = a.Default s.Default = a.Default
} }
if a.DeprecationMessage != "" {
s.Deprecated = true
s.DeprecationMessage = a.DeprecationMessage
}
s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription) s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
s.Title = a.Title s.Title = a.Title
s.Enum = a.Enum s.Enum = a.Enum

View File

@ -61,6 +61,8 @@ github.com/databricks/cli/bundle/config.Experimental:
"pydabs": "pydabs":
"description": |- "description": |-
The PyDABs configuration. The PyDABs configuration.
"deprecation_message": |-
Deprecated: please use python instead
"python": "python":
"description": |- "description": |-
Configures loading of Python code defined with 'databricks-bundles' package. Configures loading of Python code defined with 'databricks-bundles' package.
@ -220,6 +222,11 @@ github.com/databricks/cli/bundle/config.Root:
The bundle attributes when deploying to this target. The bundle attributes when deploying to this target.
"markdown_description": |- "markdown_description": |-
The bundle attributes when deploying to this target, The bundle attributes when deploying to this target,
"environments":
"description": |-
PLACEHOLDER
"deprecation_message": |-
Deprecated: please use targets instead
"experimental": "experimental":
"description": |- "description": |-
Defines attributes for experimental features. Defines attributes for experimental features.
@ -308,6 +315,8 @@ github.com/databricks/cli/bundle/config.Target:
"compute_id": "compute_id":
"description": |- "description": |-
Deprecated. The ID of the compute to use for this target. Deprecated. The ID of the compute to use for this target.
"deprecation_message": |-
Deprecated: please use cluster_id instead
"default": "default":
"description": |- "description": |-
Whether this target is the default target. Whether this target is the default target.

View File

@ -1217,7 +1217,9 @@
"properties": { "properties": {
"pydabs": { "pydabs": {
"description": "The PyDABs configuration.", "description": "The PyDABs configuration.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs" "$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs",
"deprecationMessage": "Deprecated: please use python instead",
"deprecated": true
}, },
"python": { "python": {
"description": "Configures loading of Python code defined with 'databricks-bundles' package.", "description": "Configures loading of Python code defined with 'databricks-bundles' package.",
@ -1506,7 +1508,9 @@
}, },
"compute_id": { "compute_id": {
"description": "Deprecated. The ID of the compute to use for this target.", "description": "Deprecated. The ID of the compute to use for this target.",
"$ref": "#/$defs/string" "$ref": "#/$defs/string",
"deprecationMessage": "Deprecated: please use cluster_id instead",
"deprecated": true
}, },
"default": { "default": {
"description": "Whether this target is the default target.", "description": "Whether this target is the default target.",
@ -7409,6 +7413,11 @@
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle", "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle",
"markdownDescription": "The bundle attributes when deploying to this target," "markdownDescription": "The bundle attributes when deploying to this target,"
}, },
"environments": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target",
"deprecationMessage": "Deprecated: please use targets instead",
"deprecated": true
},
"experimental": { "experimental": {
"description": "Defines attributes for experimental features.", "description": "Defines attributes for experimental features.",
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental" "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental"

View File

@ -164,7 +164,7 @@
} }
], ],
"name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_pipeline", "name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_pipeline",
"target": "project_name_$UNIQUE_PRJ_dev", "schema": "project_name_$UNIQUE_PRJ_dev",
"url": "[DATABRICKS_URL]/pipelines/[UUID]?o=[NUMID]" "url": "[DATABRICKS_URL]/pipelines/[UUID]?o=[NUMID]"
} }
} }

View File

@ -224,22 +224,21 @@ func (a *syncTest) snapshotContains(files []string) {
_, ok := s.LastModifiedTimes[filePath] _, ok := s.LastModifiedTimes[filePath]
assert.True(a.t, ok, "%s not in snapshot file: %v", filePath, s.LastModifiedTimes) assert.True(a.t, ok, "%s not in snapshot file: %v", filePath, s.LastModifiedTimes)
} }
assert.Equal(a.t, len(files), len(s.LastModifiedTimes)) assert.Equal(a.t, len(files), len(s.LastModifiedTimes), "files=%s s.LastModifiedTimes=%s", files, s.LastModifiedTimes)
} }
func TestSyncFullFileSync(t *testing.T) { func TestSyncFullFileSync(t *testing.T) {
ctx, assertSync := setupSyncTest(t, "--full", "--watch") ctx, assertSync := setupSyncTest(t, "--full", "--watch")
// .gitignore is created by the sync process to enforce .databricks is not synced
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
// New file // New file
localFilePath := filepath.Join(assertSync.localRoot, "foo.txt") localFilePath := filepath.Join(assertSync.localRoot, "foo.txt")
f := testfile.CreateFile(t, localFilePath) f := testfile.CreateFile(t, localFilePath)
defer f.Close(t) defer f.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.txt", ".gitignore")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.txt"))
assertSync.remoteFileContent(ctx, "foo.txt", "") assertSync.remoteFileContent(ctx, "foo.txt", "")
// Write to file // Write to file
@ -255,24 +254,23 @@ func TestSyncFullFileSync(t *testing.T) {
// delete // delete
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
} }
func TestSyncIncrementalFileSync(t *testing.T) { func TestSyncIncrementalFileSync(t *testing.T) {
ctx, assertSync := setupSyncTest(t, "--watch") ctx, assertSync := setupSyncTest(t, "--watch")
// .gitignore is created by the sync process to enforce .databricks is not synced
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
// New file // New file
localFilePath := filepath.Join(assertSync.localRoot, "foo.txt") localFilePath := filepath.Join(assertSync.localRoot, "foo.txt")
f := testfile.CreateFile(t, localFilePath) f := testfile.CreateFile(t, localFilePath)
defer f.Close(t) defer f.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.txt", ".gitignore")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.txt"))
assertSync.remoteFileContent(ctx, "foo.txt", "") assertSync.remoteFileContent(ctx, "foo.txt", "")
assertSync.snapshotContains(append(repoFiles, "foo.txt", ".gitignore")) assertSync.snapshotContains(append(repoFiles, "foo.txt"))
// Write to file // Write to file
f.Overwrite(t, `{"statement": "Mi Gente"}`) f.Overwrite(t, `{"statement": "Mi Gente"}`)
@ -287,16 +285,15 @@ func TestSyncIncrementalFileSync(t *testing.T) {
// delete // delete
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
assertSync.snapshotContains(append(repoFiles, ".gitignore")) assertSync.snapshotContains(repoFiles)
} }
func TestSyncNestedFolderSync(t *testing.T) { func TestSyncNestedFolderSync(t *testing.T) {
ctx, assertSync := setupSyncTest(t, "--watch") ctx, assertSync := setupSyncTest(t, "--watch")
// .gitignore is created by the sync process to enforce .databricks is not synced
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
// New file // New file
localFilePath := filepath.Join(assertSync.localRoot, "dir1/dir2/dir3/foo.txt") localFilePath := filepath.Join(assertSync.localRoot, "dir1/dir2/dir3/foo.txt")
@ -305,25 +302,24 @@ func TestSyncNestedFolderSync(t *testing.T) {
f := testfile.CreateFile(t, localFilePath) f := testfile.CreateFile(t, localFilePath)
defer f.Close(t) defer f.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "dir1")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "dir1"))
assertSync.remoteDirContent(ctx, "dir1", []string{"dir2"}) assertSync.remoteDirContent(ctx, "dir1", []string{"dir2"})
assertSync.remoteDirContent(ctx, "dir1/dir2", []string{"dir3"}) assertSync.remoteDirContent(ctx, "dir1/dir2", []string{"dir3"})
assertSync.remoteDirContent(ctx, "dir1/dir2/dir3", []string{"foo.txt"}) assertSync.remoteDirContent(ctx, "dir1/dir2/dir3", []string{"foo.txt"})
assertSync.snapshotContains(append(repoFiles, ".gitignore", "dir1/dir2/dir3/foo.txt")) assertSync.snapshotContains(append(repoFiles, "dir1/dir2/dir3/foo.txt"))
// delete // delete
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteNotExist(ctx, "dir1") assertSync.remoteNotExist(ctx, "dir1")
assertSync.snapshotContains(append(repoFiles, ".gitignore")) assertSync.snapshotContains(repoFiles)
} }
func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) { func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
ctx, assertSync := setupSyncTest(t, "--watch") ctx, assertSync := setupSyncTest(t, "--watch")
// .gitignore is created by the sync process to enforce .databricks is not synced
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
// New file // New file
localFilePath := filepath.Join(assertSync.localRoot, "dir1/dir2/dir3/foo.txt") localFilePath := filepath.Join(assertSync.localRoot, "dir1/dir2/dir3/foo.txt")
@ -353,9 +349,8 @@ func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) {
func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) { func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
ctx, assertSync := setupSyncTest(t, "--watch") ctx, assertSync := setupSyncTest(t, "--watch")
// .gitignore is created by the sync process to enforce .databricks is not synced
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
// New file // New file
localFilePath := filepath.Join(assertSync.localRoot, "dir1/a b+c/c+d e/e+f g#i.txt") localFilePath := filepath.Join(assertSync.localRoot, "dir1/a b+c/c+d e/e+f g#i.txt")
@ -364,17 +359,17 @@ func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) {
f := testfile.CreateFile(t, localFilePath) f := testfile.CreateFile(t, localFilePath)
defer f.Close(t) defer f.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "dir1")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "dir1"))
assertSync.remoteDirContent(ctx, "dir1", []string{"a b+c"}) assertSync.remoteDirContent(ctx, "dir1", []string{"a b+c"})
assertSync.remoteDirContent(ctx, "dir1/a b+c", []string{"c+d e"}) assertSync.remoteDirContent(ctx, "dir1/a b+c", []string{"c+d e"})
assertSync.remoteDirContent(ctx, "dir1/a b+c/c+d e", []string{"e+f g#i.txt"}) assertSync.remoteDirContent(ctx, "dir1/a b+c/c+d e", []string{"e+f g#i.txt"})
assertSync.snapshotContains(append(repoFiles, ".gitignore", "dir1/a b+c/c+d e/e+f g#i.txt")) assertSync.snapshotContains(append(repoFiles, "dir1/a b+c/c+d e/e+f g#i.txt"))
// delete // delete
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteNotExist(ctx, "dir1/a b+c/c+d e") assertSync.remoteNotExist(ctx, "dir1/a b+c/c+d e")
assertSync.snapshotContains(append(repoFiles, ".gitignore")) assertSync.snapshotContains(repoFiles)
} }
// This is a check for the edge case when a user does the following: // This is a check for the edge case when a user does the following:
@ -395,23 +390,23 @@ func TestSyncIncrementalFileOverwritesFolder(t *testing.T) {
f := testfile.CreateFile(t, localFilePath) f := testfile.CreateFile(t, localFilePath)
defer f.Close(t) defer f.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo"))
assertSync.remoteDirContent(ctx, "foo", []string{"bar.txt"}) assertSync.remoteDirContent(ctx, "foo", []string{"bar.txt"})
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo/bar.txt")) assertSync.snapshotContains(append(repoFiles, "foo/bar.txt"))
// delete foo/bar.txt // delete foo/bar.txt
f.Remove(t) f.Remove(t)
os.Remove(filepath.Join(assertSync.localRoot, "foo")) os.Remove(filepath.Join(assertSync.localRoot, "foo"))
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteNotExist(ctx, "foo") assertSync.remoteNotExist(ctx, "foo")
assertSync.snapshotContains(append(repoFiles, ".gitignore")) assertSync.snapshotContains(repoFiles)
f2 := testfile.CreateFile(t, filepath.Join(assertSync.localRoot, "foo")) f2 := testfile.CreateFile(t, filepath.Join(assertSync.localRoot, "foo"))
defer f2.Close(t) defer f2.Close(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo"))
assertSync.objectType(ctx, "foo", "FILE") assertSync.objectType(ctx, "foo", "FILE")
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo")) assertSync.snapshotContains(append(repoFiles, "foo"))
} }
func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) { func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
@ -425,23 +420,23 @@ func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) {
// notebook was uploaded properly // notebook was uploaded properly
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo"))
assertSync.objectType(ctx, "foo", "NOTEBOOK") assertSync.objectType(ctx, "foo", "NOTEBOOK")
assertSync.language(ctx, "foo", "PYTHON") assertSync.language(ctx, "foo", "PYTHON")
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py")) assertSync.snapshotContains(append(repoFiles, "foo.py"))
// convert to vanilla python file // convert to vanilla python file
f.Overwrite(t, "# No longer a python notebook") f.Overwrite(t, "# No longer a python notebook")
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.objectType(ctx, "foo.py", "FILE") assertSync.objectType(ctx, "foo.py", "FILE")
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo.py")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.py"))
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py")) assertSync.snapshotContains(append(repoFiles, "foo.py"))
// delete the vanilla python file // delete the vanilla python file
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
assertSync.snapshotContains(append(repoFiles, ".gitignore")) assertSync.snapshotContains(repoFiles)
} }
func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) { func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
@ -454,17 +449,17 @@ func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) {
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
// assert file upload // assert file upload
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo.py")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo.py"))
assertSync.objectType(ctx, "foo.py", "FILE") assertSync.objectType(ctx, "foo.py", "FILE")
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py")) assertSync.snapshotContains(append(repoFiles, "foo.py"))
// convert to notebook // convert to notebook
f.Overwrite(t, "# Databricks notebook source") f.Overwrite(t, "# Databricks notebook source")
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.objectType(ctx, "foo", "NOTEBOOK") assertSync.objectType(ctx, "foo", "NOTEBOOK")
assertSync.language(ctx, "foo", "PYTHON") assertSync.language(ctx, "foo", "PYTHON")
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo"))
assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py")) assertSync.snapshotContains(append(repoFiles, "foo.py"))
} }
func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) { func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
@ -478,14 +473,14 @@ func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) {
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
// notebook was uploaded properly // notebook was uploaded properly
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore", "foo")) assertSync.remoteDirContent(ctx, "", append(repoFiles, "foo"))
assertSync.objectType(ctx, "foo", "NOTEBOOK") assertSync.objectType(ctx, "foo", "NOTEBOOK")
assertSync.language(ctx, "foo", "PYTHON") assertSync.language(ctx, "foo", "PYTHON")
// Delete notebook // Delete notebook
f.Remove(t) f.Remove(t)
assertSync.waitForCompletionMarker() assertSync.waitForCompletionMarker()
assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) assertSync.remoteDirContent(ctx, "", repoFiles)
} }
func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) {

View File

@ -43,7 +43,3 @@ func (f *FileSet) Files() ([]fileset.File, error) {
f.view.repo.taintIgnoreRules() f.view.repo.taintIgnoreRules()
return f.fileset.Files() return f.fileset.Files()
} }
func (f *FileSet) EnsureValidGitIgnoreExists() error {
return f.view.EnsureValidGitIgnoreExists()
}

View File

@ -1,10 +1,8 @@
package git package git
import ( import (
"os"
"path" "path"
"path/filepath" "path/filepath"
"strings"
"testing" "testing"
"github.com/databricks/cli/libs/vfs" "github.com/databricks/cli/libs/vfs"
@ -51,34 +49,3 @@ func TestFileSetNonCleanRoot(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
assert.Len(t, files, 3) assert.Len(t, files, 3)
} }
func TestFileSetAddsCacheDirToGitIgnore(t *testing.T) {
projectDir := t.TempDir()
fileSet, err := NewFileSetAtRoot(vfs.MustNew(projectDir))
require.NoError(t, err)
err = fileSet.EnsureValidGitIgnoreExists()
require.NoError(t, err)
gitIgnorePath := filepath.Join(projectDir, ".gitignore")
assert.FileExists(t, gitIgnorePath)
fileBytes, err := os.ReadFile(gitIgnorePath)
assert.NoError(t, err)
assert.Contains(t, string(fileBytes), ".databricks")
}
func TestFileSetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) {
projectDir := t.TempDir()
gitIgnorePath := filepath.Join(projectDir, ".gitignore")
fileSet, err := NewFileSetAtRoot(vfs.MustNew(projectDir))
require.NoError(t, err)
err = os.WriteFile(gitIgnorePath, []byte(".databricks"), 0o644)
require.NoError(t, err)
err = fileSet.EnsureValidGitIgnoreExists()
require.NoError(t, err)
b, err := os.ReadFile(gitIgnorePath)
require.NoError(t, err)
assert.Equal(t, 1, strings.Count(string(b), ".databricks"))
}

View File

@ -90,46 +90,25 @@ func NewView(worktreeRoot, root vfs.Path) (*View, error) {
target = strings.TrimPrefix(target, string(os.PathSeparator)) target = strings.TrimPrefix(target, string(os.PathSeparator))
target = path.Clean(filepath.ToSlash(target)) target = path.Clean(filepath.ToSlash(target))
return &View{ result := &View{
repo: repo, repo: repo,
targetPath: target, targetPath: target,
}, nil }
result.SetupDefaults()
return result, nil
} }
func NewViewAtRoot(root vfs.Path) (*View, error) { func NewViewAtRoot(root vfs.Path) (*View, error) {
return NewView(root, root) return NewView(root, root)
} }
func (v *View) EnsureValidGitIgnoreExists() error { func (v *View) SetupDefaults() {
ign, err := v.IgnoreDirectory(".databricks")
if err != nil {
return err
}
// return early if .databricks is already being ignored
if ign {
return nil
}
// Create .gitignore with .databricks entry
gitIgnorePath := filepath.Join(v.repo.Root(), v.targetPath, ".gitignore")
file, err := os.OpenFile(gitIgnorePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o644)
if err != nil {
return err
}
defer file.Close()
// Hard code .databricks ignore pattern so that we never sync it (irrespective) // Hard code .databricks ignore pattern so that we never sync it (irrespective)
// of .gitignore patterns // of .gitignore patterns
v.repo.addIgnoreRule(newStringIgnoreRules([]string{ v.repo.addIgnoreRule(newStringIgnoreRules([]string{
".databricks", ".databricks",
})) }))
_, err = file.WriteString("\n.databricks\n")
if err != nil {
return err
}
v.repo.taintIgnoreRules() v.repo.taintIgnoreRules()
return nil
} }

View File

@ -209,100 +209,12 @@ func TestViewABInTempDir(t *testing.T) {
assert.False(t, tv.Ignore("newfile")) assert.False(t, tv.Ignore("newfile"))
} }
func TestViewDoesNotChangeGitignoreIfCacheDirAlreadyIgnoredAtRoot(t *testing.T) {
expected, err := os.ReadFile("./testdata_view_ignore/.gitignore")
require.NoError(t, err)
repoPath := createFakeRepo(t, "testdata_view_ignore")
// Since root .gitignore already has .databricks, there should be no edits
// to root .gitignore
v, err := NewViewAtRoot(vfs.MustNew(repoPath))
require.NoError(t, err)
err = v.EnsureValidGitIgnoreExists()
require.NoError(t, err)
actual, err := os.ReadFile(filepath.Join(repoPath, ".gitignore"))
require.NoError(t, err)
assert.Equal(t, string(expected), string(actual))
}
func TestViewDoesNotChangeGitignoreIfCacheDirAlreadyIgnoredInSubdir(t *testing.T) {
expected, err := os.ReadFile("./testdata_view_ignore/a/.gitignore")
require.NoError(t, err)
repoPath := createFakeRepo(t, "testdata_view_ignore")
// Since root .gitignore already has .databricks, there should be no edits
// to a/.gitignore
v, err := NewView(vfs.MustNew(repoPath), vfs.MustNew(filepath.Join(repoPath, "a")))
require.NoError(t, err)
err = v.EnsureValidGitIgnoreExists()
require.NoError(t, err)
actual, err := os.ReadFile(filepath.Join(repoPath, v.targetPath, ".gitignore"))
require.NoError(t, err)
assert.Equal(t, string(expected), string(actual))
}
func TestViewAddsGitignoreWithCacheDir(t *testing.T) {
repoPath := createFakeRepo(t, "testdata")
err := os.Remove(filepath.Join(repoPath, ".gitignore"))
assert.NoError(t, err)
// Since root .gitignore was deleted, new view adds .databricks to root .gitignore
v, err := NewViewAtRoot(vfs.MustNew(repoPath))
require.NoError(t, err)
err = v.EnsureValidGitIgnoreExists()
require.NoError(t, err)
actual, err := os.ReadFile(filepath.Join(repoPath, ".gitignore"))
require.NoError(t, err)
assert.Contains(t, string(actual), "\n.databricks\n")
}
func TestViewAddsGitignoreWithCacheDirAtSubdir(t *testing.T) {
repoPath := createFakeRepo(t, "testdata")
err := os.Remove(filepath.Join(repoPath, ".gitignore"))
require.NoError(t, err)
// Since root .gitignore was deleted, new view adds .databricks to a/.gitignore
v, err := NewView(vfs.MustNew(repoPath), vfs.MustNew(filepath.Join(repoPath, "a")))
require.NoError(t, err)
err = v.EnsureValidGitIgnoreExists()
require.NoError(t, err)
actual, err := os.ReadFile(filepath.Join(repoPath, v.targetPath, ".gitignore"))
require.NoError(t, err)
// created .gitignore has cache dir listed
assert.Contains(t, string(actual), "\n.databricks\n")
assert.NoFileExists(t, filepath.Join(repoPath, ".gitignore"))
}
func TestViewAlwaysIgnoresCacheDir(t *testing.T) { func TestViewAlwaysIgnoresCacheDir(t *testing.T) {
repoPath := createFakeRepo(t, "testdata") repoPath := createFakeRepo(t, "testdata")
v, err := NewViewAtRoot(vfs.MustNew(repoPath)) v, err := NewViewAtRoot(vfs.MustNew(repoPath))
require.NoError(t, err) require.NoError(t, err)
err = v.EnsureValidGitIgnoreExists()
require.NoError(t, err)
// Delete root .gitignore which contains .databricks entry
err = os.Remove(filepath.Join(repoPath, ".gitignore"))
require.NoError(t, err)
// taint rules to reload .gitignore
v.repo.taintIgnoreRules()
// assert .databricks is still being ignored // assert .databricks is still being ignored
ign1, err := v.IgnoreDirectory(".databricks") ign1, err := v.IgnoreDirectory(".databricks")
require.NoError(t, err) require.NoError(t, err)

View File

@ -40,4 +40,8 @@ type Extension struct {
// https://code.visualstudio.com/docs/languages/json#_use-rich-formatting-in-hovers // https://code.visualstudio.com/docs/languages/json#_use-rich-formatting-in-hovers
// Also it can be used in documentation generation. // Also it can be used in documentation generation.
MarkdownDescription string `json:"markdownDescription,omitempty"` MarkdownDescription string `json:"markdownDescription,omitempty"`
// This field is not in the JSON schema spec, but it is supported in VSCode
// It is used to provide a warning for deprecated fields // It is used to provide a warning for deprecated fields
DeprecationMessage string `json:"deprecationMessage,omitempty"`
} }

View File

@ -18,10 +18,6 @@ var skipTags = []string{
// Annotation for internal bundle fields that should not be exposed to customers. // Annotation for internal bundle fields that should not be exposed to customers.
// Fields can be tagged as "internal" to remove them from the generated schema. // Fields can be tagged as "internal" to remove them from the generated schema.
"internal", "internal",
// Annotation for bundle fields that have been deprecated.
// Fields tagged as "deprecated" are omitted from the generated schema.
"deprecated",
} }
type constructor struct { type constructor struct {
@ -259,8 +255,8 @@ func (c *constructor) fromTypeStruct(typ reflect.Type) (Schema, error) {
structFields := getStructFields(typ) structFields := getStructFields(typ)
for _, structField := range structFields { for _, structField := range structFields {
bundleTags := strings.Split(structField.Tag.Get("bundle"), ",") bundleTags := strings.Split(structField.Tag.Get("bundle"), ",")
// Fields marked as "readonly", "internal" or "deprecated" are skipped // Fields marked as "readonly" or "internal" are skipped while generating
// while generating the schema // the schema
skip := false skip := false
for _, tag := range skipTags { for _, tag := range skipTags {
if slices.Contains(bundleTags, tag) { if slices.Contains(bundleTags, tag) {

View File

@ -17,11 +17,10 @@ func TestFromTypeBasic(t *testing.T) {
TriplePointer ***int `json:"triple_pointer,omitempty"` TriplePointer ***int `json:"triple_pointer,omitempty"`
// These fields should be ignored in the resulting schema. // These fields should be ignored in the resulting schema.
NotAnnotated string NotAnnotated string
DashedTag string `json:"-"` DashedTag string `json:"-"`
InternalTagged string `json:"internal_tagged" bundle:"internal"` InternalTagged string `json:"internal_tagged" bundle:"internal"`
DeprecatedTagged string `json:"deprecated_tagged" bundle:"deprecated"` ReadOnlyTagged string `json:"readonly_tagged" bundle:"readonly"`
ReadOnlyTagged string `json:"readonly_tagged" bundle:"readonly"`
} }
strRef := "#/$defs/string" strRef := "#/$defs/string"

View File

@ -80,6 +80,11 @@ type Schema struct {
// Examples of the value for properties in the schema. // Examples of the value for properties in the schema.
// https://json-schema.org/understanding-json-schema/reference/annotations // https://json-schema.org/understanding-json-schema/reference/annotations
Examples any `json:"examples,omitempty"` Examples any `json:"examples,omitempty"`
// A boolean that indicates the field should not be used and may be removed
// in the future.
// https://json-schema.org/understanding-json-schema/reference/annotations
Deprecated bool `json:"deprecated,omitempty"`
} }
// Default value defined in a JSON Schema, represented as a string. // Default value defined in a JSON Schema, represented as a string.

26
libs/sync/gitignore.go Normal file
View File

@ -0,0 +1,26 @@
package sync
import (
"context"
"os"
"path/filepath"
"github.com/databricks/cli/libs/log"
)
// WriteGitIgnore creates a .gitignore file inside dir's .databricks cache
// directory containing "*\n", so the cache contents never show up as
// untracked files in the enclosing git repository.
//
// The file is created only if it does not already exist; an existing file
// is left untouched. This is best-effort housekeeping: failures are logged
// at debug level and otherwise ignored.
func WriteGitIgnore(ctx context.Context, dir string) {
	gitignorePath := filepath.Join(dir, ".databricks", ".gitignore")

	// O_EXCL makes creation exclusive: if the file already exists we get an
	// "exists" error and leave it alone.
	file, err := os.OpenFile(gitignorePath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0o644)
	if err != nil {
		if os.IsExist(err) {
			return
		}
		log.Debugf(ctx, "Failed to create %s: %s", gitignorePath, err)
		// Fix: previously this fell through and attempted to write to a nil
		// file handle, producing a second misleading debug log.
		return
	}
	defer file.Close()

	if _, err := file.WriteString("*\n"); err != nil {
		log.Debugf(ctx, "Error writing to %s: %s", gitignorePath, err)
	}
}

View File

@ -69,10 +69,7 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) {
return nil, err return nil, err
} }
err = fileSet.EnsureValidGitIgnoreExists() WriteGitIgnore(ctx, opts.LocalRoot.Native())
if err != nil {
return nil, err
}
includeFileSet, err := fileset.NewGlobSet(opts.LocalRoot, opts.Include) includeFileSet, err := fileset.NewGlobSet(opts.LocalRoot, opts.Include)
if err != nil { if err != nil {

View File

@ -40,9 +40,6 @@ func TestGetFileSet(t *testing.T) {
fileSet, err := git.NewFileSetAtRoot(root) fileSet, err := git.NewFileSetAtRoot(root)
require.NoError(t, err) require.NoError(t, err)
err = fileSet.EnsureValidGitIgnoreExists()
require.NoError(t, err)
inc, err := fileset.NewGlobSet(root, []string{}) inc, err := fileset.NewGlobSet(root, []string{})
require.NoError(t, err) require.NoError(t, err)
@ -59,7 +56,7 @@ func TestGetFileSet(t *testing.T) {
fileList, err := s.GetFileList(ctx) fileList, err := s.GetFileList(ctx)
require.NoError(t, err) require.NoError(t, err)
require.Len(t, fileList, 10) require.Len(t, fileList, 9)
inc, err = fileset.NewGlobSet(root, []string{}) inc, err = fileset.NewGlobSet(root, []string{})
require.NoError(t, err) require.NoError(t, err)
@ -77,7 +74,7 @@ func TestGetFileSet(t *testing.T) {
fileList, err = s.GetFileList(ctx) fileList, err = s.GetFileList(ctx)
require.NoError(t, err) require.NoError(t, err)
require.Len(t, fileList, 2) require.Len(t, fileList, 1)
inc, err = fileset.NewGlobSet(root, []string{"./.databricks/*.go"}) inc, err = fileset.NewGlobSet(root, []string{"./.databricks/*.go"})
require.NoError(t, err) require.NoError(t, err)
@ -95,7 +92,7 @@ func TestGetFileSet(t *testing.T) {
fileList, err = s.GetFileList(ctx) fileList, err = s.GetFileList(ctx)
require.NoError(t, err) require.NoError(t, err)
require.Len(t, fileList, 11) require.Len(t, fileList, 10)
} }
func TestRecursiveExclude(t *testing.T) { func TestRecursiveExclude(t *testing.T) {
@ -106,9 +103,6 @@ func TestRecursiveExclude(t *testing.T) {
fileSet, err := git.NewFileSetAtRoot(root) fileSet, err := git.NewFileSetAtRoot(root)
require.NoError(t, err) require.NoError(t, err)
err = fileSet.EnsureValidGitIgnoreExists()
require.NoError(t, err)
inc, err := fileset.NewGlobSet(root, []string{}) inc, err := fileset.NewGlobSet(root, []string{})
require.NoError(t, err) require.NoError(t, err)
@ -125,7 +119,7 @@ func TestRecursiveExclude(t *testing.T) {
fileList, err := s.GetFileList(ctx) fileList, err := s.GetFileList(ctx)
require.NoError(t, err) require.NoError(t, err)
require.Len(t, fileList, 7) require.Len(t, fileList, 6)
} }
func TestNegateExclude(t *testing.T) { func TestNegateExclude(t *testing.T) {
@ -136,9 +130,6 @@ func TestNegateExclude(t *testing.T) {
fileSet, err := git.NewFileSetAtRoot(root) fileSet, err := git.NewFileSetAtRoot(root)
require.NoError(t, err) require.NoError(t, err)
err = fileSet.EnsureValidGitIgnoreExists()
require.NoError(t, err)
inc, err := fileset.NewGlobSet(root, []string{}) inc, err := fileset.NewGlobSet(root, []string{})
require.NoError(t, err) require.NoError(t, err)

View File

@ -24,10 +24,12 @@ type ExecutionContext struct {
FromWebTerminal bool `json:"from_web_terminal,omitempty"` FromWebTerminal bool `json:"from_web_terminal,omitempty"`
// Time taken for the CLI command to execute. // Time taken for the CLI command to execute.
ExecutionTimeMs int64 `json:"execution_time_ms,omitempty"` // We want to serialize the zero value as well so the omitempty tag is not set.
ExecutionTimeMs int64 `json:"execution_time_ms"`
// Exit code of the CLI command. // Exit code of the CLI command.
ExitCode int64 `json:"exit_code,omitempty"` // We want to serialize the zero value as well so the omitempty tag is not set.
ExitCode int64 `json:"exit_code"`
} }
type CliTestEvent struct { type CliTestEvent struct {

View File

@ -13,7 +13,7 @@ resources:
{{- else}} {{- else}}
catalog: {{default_catalog}} catalog: {{default_catalog}}
{{- end}} {{- end}}
target: {{.project_name}}_${bundle.target} schema: {{.project_name}}_${bundle.target}
{{- if $with_serverless }} {{- if $with_serverless }}
serverless: true serverless: true
{{- end}} {{- end}}