mirror of https://github.com/databricks/cli.git
Merge remote-tracking branch 'origin' into daemon-library
commit de6ebdb44e
@@ -57,6 +57,8 @@ const (
 	CleanupScript = "script.cleanup"
 	PrepareScript = "script.prepare"
 	MaxFileSize = 100_000
+	// Filename to save replacements to (used by diff.py)
+	ReplsFile = "repls.json"
 )
 
 var Scripts = map[string]bool{
@@ -65,6 +67,10 @@ var Scripts = map[string]bool{
 	PrepareScript: true,
 }
 
+var Ignored = map[string]bool{
+	ReplsFile: true,
+}
+
 func TestAccept(t *testing.T) {
 	testAccept(t, InprocessMode, SingleTest)
 }
@@ -153,6 +159,8 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	testdiff.PrepareReplacementSdkVersion(t, &repls)
 	testdiff.PrepareReplacementsGoVersion(t, &repls)
 
+	repls.SetPath(cwd, "[TESTROOT]")
+
 	repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})
 
 	testDirs := getTests(t)
@@ -311,6 +319,11 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 	// User replacements come last:
 	repls.Repls = append(repls.Repls, config.Repls...)
 
+	// Save replacements to temp test directory so that it can be read by diff.py
+	replsJson, err := json.MarshalIndent(repls.Repls, "", " ")
+	require.NoError(t, err)
+	testutil.WriteFile(t, filepath.Join(tmpDir, ReplsFile), string(replsJson))
+
 	if coverDir != "" {
 		// Creating individual coverage directory for each test, because writing to the same one
 		// results in sporadic failures like this one (only if tests are running in parallel):
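For illustration only (not part of the commit): the file written above is consumed by diff.py as a JSON array of objects with an "Old" regex string and a "New" replacement string. Those field names are what diff.py reads; exactly how testdiff.Replacement serializes is an assumption here, and the "[TESTROOT]" path pattern below is purely hypothetical. A minimal Python sketch of the assumed shape and how the replacements are applied in order:

import re

repls = [
    {"Old": "dbapi[0-9a-f]+", "New": "[DATABRICKS_TOKEN]"},
    {"Old": r"/tmp/TestAccept\w+", "New": "[TESTROOT]"},  # hypothetical path pattern
]

# Applied the same way diff.py applies them, one compiled pattern at a time:
patterns = [(re.compile(r["Old"]), r["New"]) for r in repls]
line = "token=dbapi0123abcd in /tmp/TestAcceptXYZ/out\n"
for comp, new in patterns:
    line = comp.sub(new, line)
print(line, end="")  # token=[DATABRICKS_TOKEN] in [TESTROOT]/out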
@@ -321,6 +334,10 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
 	}
 
+	absDir, err := filepath.Abs(dir)
+	require.NoError(t, err)
+	cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
+
 	// Write combined output to a file
 	out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
 	require.NoError(t, err)
@@ -369,6 +386,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		if _, ok := outputs[relPath]; ok {
 			continue
 		}
+		if _, ok := Ignored[relPath]; ok {
+			continue
+		}
 		unexpected = append(unexpected, relPath)
 		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+"""This script implements "diff -r -U2 dir1 dir2" but applies replacements first"""
+
+import sys
+import difflib
+import json
+import re
+from pathlib import Path
+
+
+def replaceAll(patterns, s):
+    for comp, new in patterns:
+        s = comp.sub(new, s)
+    return s
+
+
+def main():
+    d1, d2 = sys.argv[1:]
+    d1, d2 = Path(d1), Path(d2)
+
+    with open("repls.json") as f:
+        repls = json.load(f)
+
+    patterns = []
+    for r in repls:
+        try:
+            c = re.compile(r["Old"])
+            patterns.append((c, r["New"]))
+        except re.error as e:
+            print(f"Regex error for pattern {r}: {e}", file=sys.stderr)
+
+    files1 = [str(p.relative_to(d1)) for p in d1.rglob("*") if p.is_file()]
+    files2 = [str(p.relative_to(d2)) for p in d2.rglob("*") if p.is_file()]
+
+    set1 = set(files1)
+    set2 = set(files2)
+
+    for f in sorted(set1 | set2):
+        p1 = d1 / f
+        p2 = d2 / f
+        if f not in set2:
+            print(f"Only in {d1}: {f}")
+        elif f not in set1:
+            print(f"Only in {d2}: {f}")
+        else:
+            a = [replaceAll(patterns, x) for x in p1.read_text().splitlines(True)]
+            b = [replaceAll(patterns, x) for x in p2.read_text().splitlines(True)]
+            if a != b:
+                p1_str = p1.as_posix()
+                p2_str = p2.as_posix()
+                for line in difflib.unified_diff(a, b, p1_str, p2_str, "", "", 2):
+                    print(line, end="")
+
+
+if __name__ == "__main__":
+    main()
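For illustration only (not part of the commit): diff.py reads repls.json from the current working directory and takes the two directories to compare as arguments. A small, self-contained sketch of the behavior, assuming a local copy of the script (inside the acceptance suite it is invoked by name, as in the test script further below):

import json
import subprocess
import sys
from pathlib import Path

# diff.py loads repls.json from the current working directory.
Path("repls.json").write_text(json.dumps([{"Old": "dbapi[0-9a-f]+", "New": "[DATABRICKS_TOKEN]"}]))

for d, token in [("dir_a", "dbapi00aa11bb"), ("dir_b", "dbapi22cc33dd")]:
    Path(d).mkdir(exist_ok=True)
    Path(d, "creds.txt").write_text(f"token={token}\n")

# Both tokens normalize to [DATABRICKS_TOKEN] before comparison, so unlike
# a plain `diff -r`, this prints no differences.
subprocess.run([sys.executable, "diff.py", "dir_a", "dir_b"], check=True)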
@@ -37,10 +37,12 @@ The 'my_default_python' project was generated by using the default-python templa
    ```
    $ databricks bundle run
    ```
-
-6. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from
-   https://docs.databricks.com/dev-tools/vscode-ext.html. Or read the "getting started" documentation for
-   **Databricks Connect** for instructions on running the included Python code from a different IDE.
+6. Optionally, install the Databricks extension for Visual Studio code for local development from
+   https://docs.databricks.com/dev-tools/vscode-ext.html. It can configure your
+   virtual environment and setup Databricks Connect for running unit tests locally.
+   When not using these tools, consult your development environment's documentation
+   and/or the documentation for Databricks Connect for manually setting up your environment
+   (https://docs.databricks.com/en/dev-tools/databricks-connect/python/index.html).
 
 7. For documentation on the Databricks asset bundles format used
    for this project, and for CI/CD configuration, see
@@ -0,0 +1,7 @@
+Hello!
+{
+ "id": "[USERID]",
+ "userName": "[USERNAME]"
+}
+
+Footer
@@ -0,0 +1,7 @@
+Hello!
+{
+ "id": "[UUID]",
+ "userName": "[USERNAME]"
+}
+
+Footer
@@ -0,0 +1,13 @@
+
+>>> diff.py out_dir_a out_dir_b
+Only in out_dir_a: only_in_a
+Only in out_dir_b: only_in_b
+--- out_dir_a/output.txt
++++ out_dir_b/output.txt
+@@ -1,5 +1,5 @@
+ Hello!
+ {
+- "id": "[USERID]",
++ "id": "[UUID]",
+  "userName": "[USERNAME]"
+ }
@@ -0,0 +1,17 @@
+mkdir out_dir_a
+mkdir out_dir_b
+
+touch out_dir_a/only_in_a
+touch out_dir_b/only_in_b
+
+echo Hello! >> out_dir_a/output.txt
+echo Hello! >> out_dir_b/output.txt
+
+curl -s $DATABRICKS_HOST/api/2.0/preview/scim/v2/Me >> out_dir_a/output.txt
+printf "\n\nFooter" >> out_dir_a/output.txt
+printf '{\n "id": "7d639bad-ac6d-4e6f-abd7-9522a86b0239",\n "userName": "[USERNAME]"\n}\n\nFooter' >> out_dir_b/output.txt
+
+# Unlike regular diff, diff.py will apply replacements first before doing the comparison
+errcode trace diff.py out_dir_a out_dir_b
+
+rm out_dir_a/only_in_a out_dir_b/only_in_b
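For illustration only (not part of the commit): the golden output above shows a single changed "id" line because both files are normalized before comparison. The sketch below applies placeholder-style replacements and prints a unified diff with two context lines, mirroring the difflib call in diff.py; the UUID, user-id, and user-name patterns are illustrative stand-ins for whatever replacements the acceptance framework actually registers.

import difflib
import re

a_text = 'Hello!\n{\n "id": "1234567890",\n "userName": "tester@example.com"\n}\n\nFooter'
b_text = 'Hello!\n{\n "id": "7d639bad-ac6d-4e6f-abd7-9522a86b0239",\n "userName": "[USERNAME]"\n}\n\nFooter'

patterns = [
    (re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"), "[UUID]"),
    (re.compile(r'"id": "[0-9]+"'), '"id": "[USERID]"'),
    (re.compile(r"tester@example\.com"), "[USERNAME]"),
]

def norm(text):
    # Apply every replacement to every line, keeping line endings.
    out = []
    for line in text.splitlines(True):
        for comp, new in patterns:
            line = comp.sub(new, line)
        out.append(line)
    return out

# n=2 matches the "2" passed to difflib.unified_diff in diff.py, which is
# why the golden hunk header reads "@@ -1,5 +1,5 @@".
for line in difflib.unified_diff(norm(a_text), norm(b_text),
                                 "out_dir_a/output.txt", "out_dir_b/output.txt", "", "", 2):
    print(line, end="")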
@@ -0,0 +1 @@
+LocalOnly = true
@@ -38,10 +38,16 @@ The '{{.project_name}}' project was generated by using the default-python templa
    $ databricks bundle run
    ```
 
+{{- if (eq .include_python "no") }}
 6. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from
    https://docs.databricks.com/dev-tools/vscode-ext.html.
-{{- if (eq .include_python "yes") }} Or read the "getting started" documentation for
-   **Databricks Connect** for instructions on running the included Python code from a different IDE.
+{{- else }}
+6. Optionally, install the Databricks extension for Visual Studio code for local development from
+   https://docs.databricks.com/dev-tools/vscode-ext.html. It can configure your
+   virtual environment and setup Databricks Connect for running unit tests locally.
+   When not using these tools, consult your development environment's documentation
+   and/or the documentation for Databricks Connect for manually setting up your environment
+   (https://docs.databricks.com/en/dev-tools/databricks-connect/python/index.html).
 {{- end}}
 
 7. For documentation on the Databricks asset bundles format used