mirror of https://github.com/databricks/cli.git

add tests for scala notebooks

parent 2eb6ea5d91
commit cd8cc2c531
@@ -371,7 +371,7 @@ func TestAccFilerWorkspaceNotebookConflict(t *testing.T) {
     ctx := context.Background()
     var err error
 
-    for _, tcases := range []struct {
+    for _, tc := range []struct {
         name string
         nameWithoutExt string
         content1 string
@@ -381,16 +381,16 @@ func TestAccFilerWorkspaceNotebookConflict(t *testing.T) {
         {
             name: "pyNb.py",
             nameWithoutExt: "pyNb",
-            content1: "# Databricks notebook source\nprint('first upload'))",
-            expected1: "# Databricks notebook source\nprint('first upload'))",
-            content2: "# Databricks notebook source\nprint('second upload'))",
+            content1: "# Databricks notebook source\nprint('first upload')",
+            expected1: "# Databricks notebook source\nprint('first upload')",
+            content2: "# Databricks notebook source\nprint('second upload')",
         },
         {
             name: "rNb.r",
             nameWithoutExt: "rNb",
-            content1: "# Databricks notebook source\nprint('first upload'))",
-            expected1: "# Databricks notebook source\nprint('first upload'))",
-            content2: "# Databricks notebook source\nprint('second upload'))",
+            content1: "# Databricks notebook source\nprint('first upload')",
+            expected1: "# Databricks notebook source\nprint('first upload')",
+            content2: "# Databricks notebook source\nprint('second upload')",
         },
         {
             name: "sqlNb.sql",
@@ -402,9 +402,9 @@ func TestAccFilerWorkspaceNotebookConflict(t *testing.T) {
         {
             name: "scalaNb.scala",
             nameWithoutExt: "scalaNb",
-            content1: "// Databricks notebook source\n println(\"first upload\"))",
-            expected1: "// Databricks notebook source\n println(\"first upload\"))",
-            content2: "// Databricks notebook source\n println(\"second upload\"))",
+            content1: "// Databricks notebook source\n println(\"first upload\")",
+            expected1: "// Databricks notebook source\n println(\"first upload\")",
+            content2: "// Databricks notebook source\n println(\"second upload\")",
         },
         {
             name: "pythonJupyterNb.ipynb",
@@ -420,19 +420,26 @@ func TestAccFilerWorkspaceNotebookConflict(t *testing.T) {
             expected1: "# Databricks notebook source\nprint(1)",
             content2: readFile(t, "testdata/notebooks/r2.ipynb"),
         },
+        {
+            name: "scalaJupyterNb.ipynb",
+            nameWithoutExt: "scalaJupyterNb",
+            content1: readFile(t, "testdata/notebooks/scala1.ipynb"),
+            expected1: "// Databricks notebook source\nprintln(1)",
+            content2: readFile(t, "testdata/notebooks/scala2.ipynb"),
+        },
     } {
-        t.Run(tcases.name, func(t *testing.T) {
+        t.Run(tc.name, func(t *testing.T) {
             // Upload the notebook
-            err = f.Write(ctx, tcases.name, strings.NewReader(tcases.content1))
+            err = f.Write(ctx, tc.name, strings.NewReader(tc.content1))
             require.NoError(t, err)
 
             // Assert contents after initial upload
-            filerTest{t, f}.assertContents(ctx, tcases.nameWithoutExt, tcases.expected1)
+            filerTest{t, f}.assertContents(ctx, tc.nameWithoutExt, tc.expected1)
 
             // Assert uploading a second time fails due to overwrite mode missing
-            err = f.Write(ctx, tcases.name, strings.NewReader(tcases.content2))
+            err = f.Write(ctx, tc.name, strings.NewReader(tc.content2))
             assert.ErrorIs(t, err, fs.ErrExist)
-            assert.Regexp(t, regexp.MustCompile(`file already exists: .*/`+tcases.nameWithoutExt+`$`), err.Error())
+            assert.Regexp(t, regexp.MustCompile(`file already exists: .*/`+tc.nameWithoutExt+`$`), err.Error())
 
         })
     }
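The table entries above load their fixtures through a readFile helper whose definition is not part of this diff. A minimal sketch of what such a helper presumably looks like (names and package layout are assumptions, not taken from the commit):

package filer_test // hypothetical package name

import (
    "os"
    "testing"

    "github.com/stretchr/testify/require"
)

// readFile returns a test fixture's contents as a string, failing the test
// if the file cannot be read. Sketch only; the real helper lives elsewhere
// in the test package and may differ.
func readFile(t *testing.T, name string) string {
    data, err := os.ReadFile(name)
    require.NoError(t, err)
    return string(data)
}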
@@ -456,18 +463,18 @@ func TestAccFilerWorkspaceNotebookWithOverwriteFlag(t *testing.T) {
         {
             name: "pyNb.py",
             nameWithoutExt: "pyNb",
-            content1: "# Databricks notebook source\nprint('first upload'))",
-            expected1: "# Databricks notebook source\nprint('first upload'))",
-            content2: "# Databricks notebook source\nprint('second upload'))",
-            expected2: "# Databricks notebook source\nprint('second upload'))",
+            content1: "# Databricks notebook source\nprint('first upload')",
+            expected1: "# Databricks notebook source\nprint('first upload')",
+            content2: "# Databricks notebook source\nprint('second upload')",
+            expected2: "# Databricks notebook source\nprint('second upload')",
         },
         {
             name: "rNb.r",
             nameWithoutExt: "rNb",
-            content1: "# Databricks notebook source\nprint('first upload'))",
-            expected1: "# Databricks notebook source\nprint('first upload'))",
-            content2: "# Databricks notebook source\nprint('second upload'))",
-            expected2: "# Databricks notebook source\nprint('second upload'))",
+            content1: "# Databricks notebook source\nprint('first upload')",
+            expected1: "# Databricks notebook source\nprint('first upload')",
+            content2: "# Databricks notebook source\nprint('second upload')",
+            expected2: "# Databricks notebook source\nprint('second upload')",
         },
         {
             name: "sqlNb.sql",
@@ -480,26 +487,34 @@ func TestAccFilerWorkspaceNotebookWithOverwriteFlag(t *testing.T) {
         {
             name: "scalaNb.scala",
             nameWithoutExt: "scalaNb",
-            content1: "// Databricks notebook source\n println(\"first upload\"))",
-            expected1: "// Databricks notebook source\n println(\"first upload\"))",
-            content2: "// Databricks notebook source\n println(\"second upload\"))",
-            expected2: "// Databricks notebook source\n println(\"second upload\"))",
+            content1: "// Databricks notebook source\n println(\"first upload\")",
+            expected1: "// Databricks notebook source\n println(\"first upload\")",
+            content2: "// Databricks notebook source\n println(\"second upload\")",
+            expected2: "// Databricks notebook source\n println(\"second upload\")",
         },
         {
             name: "pythonJupyterNb.ipynb",
             nameWithoutExt: "pythonJupyterNb",
             content1: readFile(t, "testdata/notebooks/py1.ipynb"),
-            expected1: "# Databricks notebook source\nprint(1))",
+            expected1: "# Databricks notebook source\nprint(1)",
             content2: readFile(t, "testdata/notebooks/py2.ipynb"),
-            expected2: "# Databricks notebook source\nprint(2))",
+            expected2: "# Databricks notebook source\nprint(2)",
         },
         {
             name: "rJupyterNb.ipynb",
             nameWithoutExt: "rJupyterNb",
             content1: readFile(t, "testdata/notebooks/r1.ipynb"),
-            expected1: "# Databricks notebook source\nprint(1))",
+            expected1: "# Databricks notebook source\nprint(1)",
             content2: readFile(t, "testdata/notebooks/r2.ipynb"),
-            expected2: "# Databricks notebook source\nprint(2))",
+            expected2: "# Databricks notebook source\nprint(2)",
+        },
+        {
+            name: "scalaJupyterNb.ipynb",
+            nameWithoutExt: "scalaJupyterNb",
+            content1: readFile(t, "testdata/notebooks/scala1.ipynb"),
+            expected1: "// Databricks notebook source\nprintln(1)",
+            content2: readFile(t, "testdata/notebooks/scala2.ipynb"),
+            expected2: "// Databricks notebook source\nprintln(2)",
         },
     } {
         t.Run(tcases.name, func(t *testing.T) {
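The assertions in these tests go through a filerTest helper that is defined outside the hunks shown here. A rough sketch, assuming it simply reads the path back through the filer and compares the result (assumed imports: bytes, context, io, testing, the filer package, and testify's assert/require):

// Sketch of the filerTest helper used by these assertions; the real type and
// method live elsewhere in the test package and may differ in detail.
type filerTest struct {
    *testing.T
    filer.Filer
}

func (f filerTest) assertContents(ctx context.Context, name string, contents string) {
    reader, err := f.Read(ctx, name)
    require.NoError(f, err)

    var body bytes.Buffer
    _, err = io.Copy(&body, reader)
    require.NoError(f, err)

    assert.Equal(f, contents, body.String())
}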
@@ -540,6 +555,7 @@ func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
         {"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")},
         {"pyNb.py", "# Databricks notebook source\nprint('first upload'))"},
         {"rNb.r", "# Databricks notebook source\nprint('first upload'))"},
+        {"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")},
         {"scalaNb.scala", "// Databricks notebook source\n println(\"first upload\"))"},
         {"sqlNb.sql", "-- Databricks notebook source\n SELECT \"first upload\""},
     }
@@ -580,6 +596,7 @@ func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) {
         "pyNb.py",
         "r1.ipynb",
         "rNb.r",
+        "scala1.ipynb",
         "scalaNb.scala",
         "sqlNb.sql",
     }, names)
@@ -606,6 +623,7 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer {
         {"bar.py", "print('foo')"},
         {"p1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")},
         {"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")},
+        {"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")},
         {"pretender", "not a notebook"},
         {"dir/file.txt", "file content"},
         {"scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')"},
@@ -633,6 +651,7 @@ func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) {
     filerTest{t, wf}.assertContents(ctx, "bar.py", "print('foo')")
     filerTest{t, wf}.assertContentsJupyter(ctx, "p1.ipynb", "python")
     filerTest{t, wf}.assertContentsJupyter(ctx, "r1.ipynb", "R")
+    filerTest{t, wf}.assertContentsJupyter(ctx, "scala1.ipynb", "scala")
     filerTest{t, wf}.assertContents(ctx, "dir/file.txt", "file content")
     filerTest{t, wf}.assertContents(ctx, "scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')")
     filerTest{t, wf}.assertContents(ctx, "pretender", "not a notebook")
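The assertContentsJupyter helper used just above (with language names such as "python", "R", and now "scala") is also not defined in this diff. A hedged sketch under the assumption that the filer returns the notebook as Jupyter JSON and the language shows up in its metadata; the real helper is likely stricter:

// Sketch only; reuses the filerTest type sketched earlier and assumes
// imports of bytes, context, encoding/json, io, strings, assert, require.
func (f filerTest) assertContentsJupyter(ctx context.Context, name string, language string) {
    reader, err := f.Read(ctx, name)
    require.NoError(f, err)

    var body bytes.Buffer
    _, err = io.Copy(&body, reader)
    require.NoError(f, err)

    var nb map[string]any
    require.NoError(f, json.Unmarshal(body.Bytes(), &nb))

    // Expect a Jupyter notebook payload that mentions the expected language.
    assert.Contains(f, nb, "cells")
    assert.Contains(f, strings.ToLower(body.String()), strings.ToLower(language))
}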
@@ -682,6 +701,11 @@ func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) {
     require.NoError(t, err)
     filerTest{t, wf}.assertNotExists(ctx, "r1.ipynb")
 
+    // Delete scala jupyter notebook
+    err = wf.Delete(ctx, "scala1.ipynb")
+    require.NoError(t, err)
+    filerTest{t, wf}.assertNotExists(ctx, "scala1.ipynb")
+
     // Delete non-existent file
     err = wf.Delete(ctx, "non-existent.py")
     assert.ErrorIs(t, err, fs.ErrNotExist)
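assertNotExists, used above after each Delete, is likewise defined outside this diff. A plausible sketch, assuming it stats the path through the filer and expects fs.ErrNotExist:

// Sketch only; reuses the filerTest type sketched earlier and assumes
// imports of context, io/fs, and testify's assert.
func (f filerTest) assertNotExists(ctx context.Context, name string) {
    _, err := f.Stat(ctx, name)
    assert.ErrorIs(f, err, fs.ErrNotExist)
}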
@@ -734,6 +758,12 @@ func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) {
     assert.Equal(t, "r1.ipynb", info.Name())
     assert.False(t, info.IsDir())
 
+    // Stat on a Scala jupyter notebook
+    info, err = wf.Stat(ctx, "scala1.ipynb")
+    require.NoError(t, err)
+    assert.Equal(t, "scala1.ipynb", info.Name())
+    assert.False(t, info.IsDir())
+
     // Stat on a directory
     info, err = wf.Stat(ctx, "dir")
     require.NoError(t, err)
@@ -794,10 +824,14 @@ func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
             sourceContent: "# Databricks notebook source\nprint('foo')",
             jupyterName: "foo.ipynb",
         },
+        {
+            language: "scala",
+            sourceName: "foo.scala",
+            sourceContent: "// Databricks notebook source\nprintln('foo')",
+            jupyterName: "foo.ipynb",
+        },
     } {
-        t.Run(tc.language, func(t *testing.T) {
-            t.Parallel()
-
+        t.Run("source_"+tc.language, func(t *testing.T) {
             ctx := context.Background()
             wf, _ := setupWsfsExtensionsFiler(t)
 
@@ -834,10 +868,14 @@ func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) {
             jupyterName: "foo.ipynb",
             jupyterContent: readFile(t, "testdata/notebooks/r1.ipynb"),
         },
+        {
+            language: "scala",
+            sourceName: "bar.scala",
+            jupyterName: "foo.ipynb",
+            jupyterContent: readFile(t, "testdata/notebooks/scala1.ipynb"),
+        },
     } {
-        t.Run(tc.language, func(t *testing.T) {
-            t.Parallel()
-
+        t.Run("jupyter_"+tc.language, func(t *testing.T) {
             ctx := context.Background()
             wf, _ := setupWsfsExtensionsFiler(t)
 
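The two loops in TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved range over case structs whose declarations fall outside these hunks. Judging only from the fields referenced above, the source-format cases presumably look roughly like this sketch (not the actual declaration; the values shown are the scala case added by this commit):

for _, tc := range []struct {
    language      string
    sourceName    string
    sourceContent string
    jupyterName   string
}{
    // existing python and r cases, plus:
    {
        language:      "scala",
        sourceName:    "foo.scala",
        sourceContent: "// Databricks notebook source\nprintln('foo')",
        jupyterName:   "foo.ipynb",
    },
} {
    t.Run("source_"+tc.language, func(t *testing.T) {
        // body not shown in this diff; per the test name, it uploads the
        // source notebook and asserts that its export format is preserved
    })
}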
@@ -3,10 +3,14 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "vscode": {
+     "languageId": "r"
+    }
+   },
    "outputs": [],
    "source": [
-    "print(1)"
+    "print(2)"
    ]
   }
  ],
@@ -0,0 +1,38 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1\n"
+     ]
+    }
+   ],
+   "source": [
+    "println(1)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Scala",
+   "language": "scala",
+   "name": "scala"
+  },
+  "language_info": {
+   "codemirror_mode": "text/x-scala",
+   "file_extension": ".sc",
+   "mimetype": "text/x-scala",
+   "name": "scala",
+   "nbconvert_exporter": "script",
+   "version": "2.13.14"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
@@ -0,0 +1,38 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1\n"
+     ]
+    }
+   ],
+   "source": [
+    "println(2)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Scala",
+   "language": "scala",
+   "name": "scala"
+  },
+  "language_info": {
+   "codemirror_mode": "text/x-scala",
+   "file_extension": ".sc",
+   "mimetype": "text/x-scala",
+   "name": "scala",
+   "nbconvert_exporter": "script",
+   "version": "2.13.14"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
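Given the readFile calls added in the tests above, these two new 38-line notebook fixtures are presumably testdata/notebooks/scala1.ipynb and scala2.ipynb; the rendered diff does not show file names, so this is an inference. A small hypothetical snippet one could drop into a test to sanity-check such a fixture (not part of the commit; assumes encoding/json plus the readFile helper sketched earlier):

// The fixture should parse as nbformat-4 JSON and declare Scala as its language.
var nb struct {
    Nbformat int `json:"nbformat"`
    Metadata struct {
        Kernelspec struct {
            Language string `json:"language"`
        } `json:"kernelspec"`
    } `json:"metadata"`
}
raw := readFile(t, "testdata/notebooks/scala1.ipynb")
require.NoError(t, json.Unmarshal([]byte(raw), &nb))
assert.Equal(t, 4, nb.Nbformat)
assert.Equal(t, "scala", nb.Metadata.Kernelspec.Language)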