Migrate path translation tests to acceptance tests (#2122)

## Changes

The assertions previously made on the output are now captured in the `output.*`
files. These golden files don't capture intent the way explicit assertions do, but we
still have regular test coverage in the path translation tests under
`bundle/config/mutator`.
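
For context, the golden-file approach works roughly like this: each test runs the CLI, captures its output, and diffs it against a checked-in `output.*` file. A minimal sketch in Go, assuming a hypothetical `assertGolden` helper rather than the actual acceptance harness:

```go
package acceptance_test

import (
	"os"
	"testing"
)

// assertGolden fails the test when produced output differs from the
// checked-in golden file (e.g. output.job.json). Hypothetical helper,
// not the repo's actual harness.
func assertGolden(t *testing.T, goldenPath string, actual []byte) {
	t.Helper()
	want, err := os.ReadFile(goldenPath)
	if err != nil {
		t.Fatalf("reading golden file %s: %v", goldenPath, err)
	}
	if string(want) != string(actual) {
		t.Errorf("output differs from %s\nwant:\n%s\ngot:\n%s", goldenPath, want, actual)
	}
}
```

The trade-off noted above is that a diff against a golden file flags any change, whereas an explicit assertion documents which value matters.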

## Tests

Tests pass.
Pieter Noordhuis, 2025-01-17 11:22:49 +01:00, committed by GitHub
commit 89eb556318 (parent 9061635789)
48 changed files with 314 additions and 142 deletions


@@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_nominal
+  name: fallback
 
 include:
   - "resources/*.yml"


@@ -0,0 +1,67 @@
[
  {
    "job_cluster_key": "default",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
    },
    "task_key": "notebook_example"
  },
  {
    "job_cluster_key": "default",
    "spark_python_task": {
      "python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
    },
    "task_key": "spark_python_example"
  },
  {
    "dbt_task": {
      "commands": [
        "dbt run",
        "dbt run"
      ],
      "project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
    },
    "job_cluster_key": "default",
    "task_key": "dbt_example"
  },
  {
    "job_cluster_key": "default",
    "sql_task": {
      "file": {
        "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
      },
      "warehouse_id": "cafef00d"
    },
    "task_key": "sql_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "whl": "dist/wheel1.whl"
      },
      {
        "whl": "dist/wheel2.whl"
      }
    ],
    "python_wheel_task": {
      "package_name": "my_package"
    },
    "task_key": "python_wheel_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "jar": "target/jar1.jar"
      },
      {
        "jar": "target/jar2.jar"
      }
    ],
    "spark_jar_task": {
      "main_class_name": "com.example.Main"
    },
    "task_key": "spark_jar_example"
  }
]
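
Note the `$USERNAME` placeholders above: the recorded outputs appear to be normalized so that environment-specific values such as the current user don't break comparison across machines. A minimal sketch of that idea, assuming a hypothetical `normalize` helper:

```go
package main

import (
	"fmt"
	"strings"
)

// normalize replaces environment-specific values with stable placeholders
// before comparison against the golden files. Hypothetical helper,
// shown for illustration only.
func normalize(output, username string) string {
	return strings.ReplaceAll(output, username, "$USERNAME")
}

func main() {
	raw := "/Workspace/Users/jane@example.com/.bundle/fallback/development/files/src/notebook"
	fmt.Println(normalize(raw, "jane@example.com"))
	// Output: /Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook
}
```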


@@ -0,0 +1,22 @@
[
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
    }
  },
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
    }
  }
]


@@ -0,0 +1,18 @@

>>> $CLI bundle validate -t development -o json

Exit code: 0

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: fallback
Target: error
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/fallback/error

Found 1 error

Exit code: 1
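
The failing `error` target is expected: for that target the placeholder value `this value is overridden` is left in place as the notebook path, and it has no recognized notebook extension. A sketch of the check implied by the error message above (not the CLI's actual implementation):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// Extensions taken from the error message recorded above.
var notebookExtensions = []string{".py", ".r", ".scala", ".sql", ".ipynb"}

// hasNotebookExtension reports whether a local notebook reference
// carries one of the accepted file extensions.
func hasNotebookExtension(path string) bool {
	ext := strings.ToLower(filepath.Ext(path))
	for _, e := range notebookExtensions {
		if e == ext {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(hasNotebookExtension("src/notebook.py"))          // true
	fmt.Println(hasNotebookExtension("this value is overridden")) // false => translation error
}
```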


@@ -4,33 +4,45 @@ resources:
       name: "placeholder"
       tasks:
         - task_key: notebook_example
+          job_cluster_key: default
           notebook_task:
             notebook_path: "this value is overridden"
 
         - task_key: spark_python_example
+          job_cluster_key: default
           spark_python_task:
             python_file: "this value is overridden"
 
         - task_key: dbt_example
+          job_cluster_key: default
           dbt_task:
             project_directory: "this value is overridden"
             commands:
               - "dbt run"
 
         - task_key: sql_example
+          job_cluster_key: default
           sql_task:
             file:
               path: "this value is overridden"
             warehouse_id: cafef00d
 
         - task_key: python_wheel_example
+          job_cluster_key: default
           python_wheel_task:
             package_name: my_package
           libraries:
             - whl: ../dist/wheel1.whl
 
         - task_key: spark_jar_example
+          job_cluster_key: default
           spark_jar_task:
             main_class_name: com.example.Main
           libraries:
             - jar: ../target/jar1.jar
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12


@@ -0,0 +1,10 @@
errcode trace $CLI bundle validate -t development -o json > output.tmp.json
# Capture job tasks
jq '.resources.jobs.my_job.tasks' output.tmp.json > output.job.json
# Capture pipeline libraries
jq '.resources.pipelines.my_pipeline.libraries' output.tmp.json > output.pipeline.json
# Expect failure for the "error" target
errcode trace $CLI bundle validate -t error
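
Judging by the `>>> ...` and `Exit code:` lines recorded in output.txt, `trace` echoes the command into the captured output and `errcode` records the exit code instead of aborting the script. A rough Go equivalent of the `errcode` behavior, for illustration only (the real helpers are presumably shell functions in the test harness):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"os/exec"
)

// runWithErrcode mirrors what "errcode" appears to do: run the command,
// forward its output, and record "Exit code: N" instead of failing.
func runWithErrcode(name string, args ...string) {
	cmd := exec.Command(name, args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	err := cmd.Run()
	code := 0
	if err != nil {
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			code = exitErr.ExitCode()
		} else {
			code = 1 // command failed to start at all
		}
	}
	fmt.Printf("\nExit code: %d\n", code)
}

func main() {
	runWithErrcode("true")  // records "Exit code: 0"
	runWithErrcode("false") // records "Exit code: 1"
}
```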


@@ -0,0 +1 @@
rm -f output.tmp.json


@@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_fallback
+  name: nominal
 
 include:
   - "resources/*.yml"


@@ -0,0 +1,89 @@
[
  {
    "job_cluster_key": "default",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
    },
    "task_key": "notebook_example"
  },
  {
    "job_cluster_key": "default",
    "spark_python_task": {
      "python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
    },
    "task_key": "spark_python_example"
  },
  {
    "dbt_task": {
      "commands": [
        "dbt run",
        "dbt run"
      ],
      "project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
    },
    "job_cluster_key": "default",
    "task_key": "dbt_example"
  },
  {
    "job_cluster_key": "default",
    "sql_task": {
      "file": {
        "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
      },
      "warehouse_id": "cafef00d"
    },
    "task_key": "sql_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "whl": "dist/wheel1.whl"
      },
      {
        "whl": "dist/wheel2.whl"
      }
    ],
    "python_wheel_task": {
      "package_name": "my_package"
    },
    "task_key": "python_wheel_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "jar": "target/jar1.jar"
      },
      {
        "jar": "target/jar2.jar"
      }
    ],
    "spark_jar_task": {
      "main_class_name": "com.example.Main"
    },
    "task_key": "spark_jar_example"
  },
  {
    "for_each_task": {
      "task": {
        "notebook_task": {
          "notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
        }
      }
    },
    "job_cluster_key": "default",
    "task_key": "for_each_notebook_example"
  },
  {
    "for_each_task": {
      "task": {
        "job_cluster_key": "default",
        "spark_python_task": {
          "python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
        }
      }
    },
    "task_key": "for_each_spark_python_example"
  }
]


@@ -0,0 +1,22 @@
[
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
    }
  },
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
    }
  }
]


@@ -0,0 +1,18 @@

>>> $CLI bundle validate -t development -o json

Exit code: 0

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: nominal
Target: error
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/nominal/error

Found 1 error

Exit code: 1


@@ -4,38 +4,45 @@ resources:
       name: "placeholder"
       tasks:
         - task_key: notebook_example
+          job_cluster_key: default
           notebook_task:
             notebook_path: "this value is overridden"
 
         - task_key: spark_python_example
+          job_cluster_key: default
           spark_python_task:
             python_file: "this value is overridden"
 
         - task_key: dbt_example
+          job_cluster_key: default
           dbt_task:
             project_directory: "this value is overridden"
             commands:
               - "dbt run"
 
         - task_key: sql_example
+          job_cluster_key: default
           sql_task:
             file:
               path: "this value is overridden"
             warehouse_id: cafef00d
 
         - task_key: python_wheel_example
+          job_cluster_key: default
           python_wheel_task:
             package_name: my_package
           libraries:
             - whl: ../dist/wheel1.whl
 
         - task_key: spark_jar_example
+          job_cluster_key: default
           spark_jar_task:
             main_class_name: com.example.Main
           libraries:
             - jar: ../target/jar1.jar
 
         - task_key: for_each_notebook_example
+          job_cluster_key: default
           for_each_task:
             task:
               notebook_task:
@@ -44,5 +51,12 @@ resources:
         - task_key: for_each_spark_python_example
           for_each_task:
             task:
+              job_cluster_key: default
               spark_python_task:
                 python_file: "this value is overridden"
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12


@@ -0,0 +1,10 @@
errcode trace $CLI bundle validate -t development -o json > output.tmp.json
# Capture job tasks
jq '.resources.jobs.my_job.tasks' output.tmp.json > output.job.json
# Capture pipeline libraries
jq '.resources.pipelines.my_pipeline.libraries' output.tmp.json > output.pipeline.json
# Expect failure for the "error" target
errcode trace $CLI bundle validate -t error


@@ -0,0 +1 @@
rm -f output.tmp.json


@@ -0,0 +1,6 @@
{
  "paths": [
    "/Workspace/remote/src/file1.py",
    "/Workspace/remote/src/file1.py"
  ]
}


@@ -0,0 +1,6 @@
{
  "paths": [
    "/Workspace/remote/src/file2.py",
    "/Workspace/remote/src/file2.py"
  ]
}


@@ -0,0 +1,4 @@

>>> $CLI bundle validate -t default -o json

>>> $CLI bundle validate -t override -o json


@@ -3,12 +3,20 @@ resources:
     job:
       tasks:
         - task_key: local
+          job_cluster_key: default
           spark_python_task:
             python_file: ../src/file1.py
 
         - task_key: variable_reference
+          job_cluster_key: default
           spark_python_task:
             # Note: this is a pure variable reference yet needs to persist the location
             # of the reference, not the location of the variable value.
             # Also see https://github.com/databricks/cli/issues/1330.
             python_file: ${var.file_path}
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12


@@ -0,0 +1,4 @@
trace $CLI bundle validate -t default -o json | \
jq '{ paths: [.resources.jobs.job.tasks[].spark_python_task.python_file] }' > output.default.json
trace $CLI bundle validate -t override -o json | \
jq '{ paths: [.resources.jobs.job.tasks[].spark_python_task.python_file] }' > output.override.json


@@ -1,112 +0,0 @@
package config_tests

import (
	"context"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestPathTranslationFallback(t *testing.T) {
	b := loadTarget(t, "./path_translation/fallback", "development")

	m := mutator.TranslatePaths()
	diags := bundle.Apply(context.Background(), b, m)
	require.NoError(t, diags.Error())

	j := b.Config.Resources.Jobs["my_job"]
	assert.Len(t, j.Tasks, 6)

	assert.Equal(t, "notebook_example", filepath.ToSlash(j.Tasks[0].TaskKey))
	assert.Equal(t, "src/notebook", filepath.ToSlash(j.Tasks[0].NotebookTask.NotebookPath))

	assert.Equal(t, "spark_python_example", filepath.ToSlash(j.Tasks[1].TaskKey))
	assert.Equal(t, "src/file.py", filepath.ToSlash(j.Tasks[1].SparkPythonTask.PythonFile))

	assert.Equal(t, "dbt_example", filepath.ToSlash(j.Tasks[2].TaskKey))
	assert.Equal(t, "src/dbt_project", filepath.ToSlash(j.Tasks[2].DbtTask.ProjectDirectory))

	assert.Equal(t, "sql_example", filepath.ToSlash(j.Tasks[3].TaskKey))
	assert.Equal(t, "src/sql.sql", filepath.ToSlash(j.Tasks[3].SqlTask.File.Path))

	assert.Equal(t, "python_wheel_example", filepath.ToSlash(j.Tasks[4].TaskKey))
	assert.Equal(t, "dist/wheel1.whl", filepath.ToSlash(j.Tasks[4].Libraries[0].Whl))
	assert.Equal(t, "dist/wheel2.whl", filepath.ToSlash(j.Tasks[4].Libraries[1].Whl))

	assert.Equal(t, "spark_jar_example", filepath.ToSlash(j.Tasks[5].TaskKey))
	assert.Equal(t, "target/jar1.jar", filepath.ToSlash(j.Tasks[5].Libraries[0].Jar))
	assert.Equal(t, "target/jar2.jar", filepath.ToSlash(j.Tasks[5].Libraries[1].Jar))

	p := b.Config.Resources.Pipelines["my_pipeline"]
	assert.Len(t, p.Libraries, 4)

	assert.Equal(t, "src/file1.py", filepath.ToSlash(p.Libraries[0].File.Path))
	assert.Equal(t, "src/notebook1", filepath.ToSlash(p.Libraries[1].Notebook.Path))
	assert.Equal(t, "src/file2.py", filepath.ToSlash(p.Libraries[2].File.Path))
	assert.Equal(t, "src/notebook2", filepath.ToSlash(p.Libraries[3].Notebook.Path))
}

func TestPathTranslationFallbackError(t *testing.T) {
	b := loadTarget(t, "./path_translation/fallback", "error")

	m := mutator.TranslatePaths()
	diags := bundle.Apply(context.Background(), b, m)
	assert.ErrorContains(t, diags.Error(), `notebook this value is overridden not found`)
}

func TestPathTranslationNominal(t *testing.T) {
	b := loadTarget(t, "./path_translation/nominal", "development")

	m := mutator.TranslatePaths()
	diags := bundle.Apply(context.Background(), b, m)
	assert.NoError(t, diags.Error())

	j := b.Config.Resources.Jobs["my_job"]
	assert.Len(t, j.Tasks, 8)

	assert.Equal(t, "notebook_example", filepath.ToSlash(j.Tasks[0].TaskKey))
	assert.Equal(t, "src/notebook", filepath.ToSlash(j.Tasks[0].NotebookTask.NotebookPath))

	assert.Equal(t, "spark_python_example", filepath.ToSlash(j.Tasks[1].TaskKey))
	assert.Equal(t, "src/file.py", filepath.ToSlash(j.Tasks[1].SparkPythonTask.PythonFile))

	assert.Equal(t, "dbt_example", filepath.ToSlash(j.Tasks[2].TaskKey))
	assert.Equal(t, "src/dbt_project", filepath.ToSlash(j.Tasks[2].DbtTask.ProjectDirectory))

	assert.Equal(t, "sql_example", filepath.ToSlash(j.Tasks[3].TaskKey))
	assert.Equal(t, "src/sql.sql", filepath.ToSlash(j.Tasks[3].SqlTask.File.Path))

	assert.Equal(t, "python_wheel_example", filepath.ToSlash(j.Tasks[4].TaskKey))
	assert.Equal(t, "dist/wheel1.whl", filepath.ToSlash(j.Tasks[4].Libraries[0].Whl))
	assert.Equal(t, "dist/wheel2.whl", filepath.ToSlash(j.Tasks[4].Libraries[1].Whl))

	assert.Equal(t, "spark_jar_example", filepath.ToSlash(j.Tasks[5].TaskKey))
	assert.Equal(t, "target/jar1.jar", filepath.ToSlash(j.Tasks[5].Libraries[0].Jar))
	assert.Equal(t, "target/jar2.jar", filepath.ToSlash(j.Tasks[5].Libraries[1].Jar))

	assert.Equal(t, "for_each_notebook_example", filepath.ToSlash(j.Tasks[6].TaskKey))
	assert.Equal(t, "src/notebook", filepath.ToSlash(j.Tasks[6].ForEachTask.Task.NotebookTask.NotebookPath))

	assert.Equal(t, "for_each_spark_python_example", filepath.ToSlash(j.Tasks[7].TaskKey))
	assert.Equal(t, "src/file.py", filepath.ToSlash(j.Tasks[7].ForEachTask.Task.SparkPythonTask.PythonFile))

	p := b.Config.Resources.Pipelines["my_pipeline"]
	assert.Len(t, p.Libraries, 4)

	assert.Equal(t, "src/file1.py", filepath.ToSlash(p.Libraries[0].File.Path))
	assert.Equal(t, "src/notebook1", filepath.ToSlash(p.Libraries[1].Notebook.Path))
	assert.Equal(t, "src/file2.py", filepath.ToSlash(p.Libraries[2].File.Path))
	assert.Equal(t, "src/notebook2", filepath.ToSlash(p.Libraries[3].Notebook.Path))
}

func TestPathTranslationNominalError(t *testing.T) {
	b := loadTarget(t, "./path_translation/nominal", "error")

	m := mutator.TranslatePaths()
	diags := bundle.Apply(context.Background(), b, m)
	assert.ErrorContains(t, diags.Error(), `notebook this value is overridden not found`)
}


@@ -1,28 +0,0 @@
package config_tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestRelativePathTranslationDefault(t *testing.T) {
	b, diags := initializeTarget(t, "./relative_path_translation", "default")
	require.NoError(t, diags.Error())

	t0 := b.Config.Resources.Jobs["job"].Tasks[0]
	assert.Equal(t, "/Workspace/remote/src/file1.py", t0.SparkPythonTask.PythonFile)
	t1 := b.Config.Resources.Jobs["job"].Tasks[1]
	assert.Equal(t, "/Workspace/remote/src/file1.py", t1.SparkPythonTask.PythonFile)
}

func TestRelativePathTranslationOverride(t *testing.T) {
	b, diags := initializeTarget(t, "./relative_path_translation", "override")
	require.NoError(t, diags.Error())

	t0 := b.Config.Resources.Jobs["job"].Tasks[0]
	assert.Equal(t, "/Workspace/remote/src/file2.py", t0.SparkPythonTask.PythonFile)
	t1 := b.Config.Resources.Jobs["job"].Tasks[1]
	assert.Equal(t, "/Workspace/remote/src/file2.py", t1.SparkPythonTask.PythonFile)
}