acc: Added tests for deploying PyPi and Maven libraries (#2359)

## Changes
Added acceptance tests for PyPI and Maven libraries.

Needed for https://github.com/databricks/cli/pull/2382, since we don't
currently have any coverage for PyPI or Maven libraries.
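
To exercise just these tests locally, something like the following should work. This is a minimal sketch, assuming the repo's usual `go test`-driven acceptance runner and that the new fixtures live under a `bundle/libraries` subtree; both are assumptions, not confirmed by this diff:

```sh
# Hypothetical invocation: run only the new library acceptance tests.
# Assumes per-directory subtests under TestAccept; adjust the -run
# pattern to match the actual test layout.
go test ./acceptance -v -run 'TestAccept/bundle/libraries'
```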
Andrew Nester · 2025-02-26 12:05:38 +00:00 · committed by GitHub
parent cdea775bd2 · commit fa79d04980
12 changed files with 123 additions and 0 deletions

maven/.gitignore
@@ -0,0 +1 @@
.databricks

maven/databricks.yml
@@ -0,0 +1,27 @@
bundle:
  name: maven

resources:
  jobs:
    testjob:
      name: test-job
      tasks:
        - task_key: dbt
          spark_jar_task:
            main_class_name: com.databricks.example.Main
          libraries:
            - maven:
                coordinates: org.jsoup:jsoup:1.7.2
          new_cluster:
            spark_version: 15.4.x-scala2.12
            node_type_id: i3.xlarge
            data_security_mode: SINGLE_USER
            num_workers: 0
            spark_conf:
              spark.master: "local[*, 4]"
              spark.databricks.cluster.profile: singleNode
            custom_tags:
              ResourceClass: SingleNode
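
The `coordinates` value above is standard Maven `groupId:artifactId:version` notation. As a quick local sanity check (not part of the test), the same coordinate can be resolved with the Maven dependency plugin:

```sh
# Resolve the coordinate into the local ~/.m2 repository; a failure here
# likely means the job cluster would also fail to fetch the library.
mvn dependency:get -Dartifact=org.jsoup:jsoup:1.7.2
```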

maven/out.job.libraries.txt
@@ -0,0 +1,7 @@
[
  {
    "maven": {
      "coordinates": "org.jsoup:jsoup:1.7.2"
    }
  }
]

maven/output.txt
@@ -0,0 +1,15 @@
>>> [CLI] bundle validate -o json
[
  {
    "maven": {
      "coordinates": "org.jsoup:jsoup:1.7.2"
    }
  }
]

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/maven/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

maven/script
@@ -0,0 +1,4 @@
trace $CLI bundle validate -o json | jq '.resources.jobs.testjob.tasks[0].libraries'
trace $CLI bundle deploy
cat out.requests.txt | jq 'select(.path == "/api/2.1/jobs/create")' | jq '.body.tasks[0].libraries' > out.job.libraries.txt
rm out.requests.txt
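
With `RecordRequests = true` (see `test.toml` below), each API call the CLI makes is captured as one JSON object per line in `out.requests.txt`; the jq pipeline above keeps only the `jobs/create` call and extracts the first task's libraries. Here is a sketch of a record shaped the way that filter expects; the `path` and `body.tasks[0].libraries` field names come from the filter itself, while the rest of the payload is illustrative:

```sh
# Feed one hand-written request record through the same jq filter.
# Only "path" and "body.tasks[0].libraries" are known from the script;
# the remaining fields/values are made up for illustration.
echo '{"path":"/api/2.1/jobs/create","body":{"tasks":[{"task_key":"dbt","libraries":[{"maven":{"coordinates":"org.jsoup:jsoup:1.7.2"}}]}]}}' |
  jq 'select(.path == "/api/2.1/jobs/create") | .body.tasks[0].libraries'
```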

maven/test.toml
@@ -0,0 +1,5 @@
# We run this test only locally for now because we need to figure out how to do
# bundle destroy on script.cleanup first.
LocalOnly = true
RecordRequests = true

pypi/.gitignore
@@ -0,0 +1 @@
.databricks

pypi/databricks.yml
@@ -0,0 +1,32 @@
bundle:
  name: pypi

resources:
  jobs:
    testjob:
      name: test-job
      tasks:
        - task_key: dbt
          dbt_task:
            project_directory: ./
            profiles_directory: dbt_profiles/
            commands:
              - 'dbt deps --target=${bundle.target}'
              - 'dbt seed --target=${bundle.target} --vars "{ dev_schema: ${workspace.current_user.short_name} }"'
              - 'dbt run --target=${bundle.target} --vars "{ dev_schema: ${workspace.current_user.short_name} }"'
          libraries:
            - pypi:
                package: dbt-databricks>=1.8.0,<2.0.0
          new_cluster:
            spark_version: 15.4.x-scala2.12
            node_type_id: i3.xlarge
            data_security_mode: SINGLE_USER
            num_workers: 0
            spark_conf:
              spark.master: "local[*, 4]"
              spark.databricks.cluster.profile: singleNode
            custom_tags:
              ResourceClass: SingleNode
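
The `package` value is a pip-style requirement specifier, pinning `dbt-databricks` to the 1.x line (at least 1.8.0, below 2.0.0). As a local sanity check (not part of the test), the same specifier can be handed to pip; `--dry-run` resolves without installing and needs pip 22.2 or newer:

```sh
# Resolve (but don't install) the same specifier the bundle declares.
pip install --dry-run 'dbt-databricks>=1.8.0,<2.0.0'
```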

pypi/out.job.libraries.txt
@@ -0,0 +1,7 @@
[
  {
    "pypi": {
      "package": "dbt-databricks>=1.8.0,<2.0.0"
    }
  }
]

pypi/output.txt
@@ -0,0 +1,15 @@
>>> [CLI] bundle validate -o json
[
  {
    "pypi": {
      "package": "dbt-databricks>=1.8.0,<2.0.0"
    }
  }
]

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/pypi/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

pypi/script
@@ -0,0 +1,4 @@
trace $CLI bundle validate -o json | jq '.resources.jobs.testjob.tasks[0].libraries'
trace $CLI bundle deploy
cat out.requests.txt | jq 'select(.path == "/api/2.1/jobs/create")' | jq '.body.tasks[0].libraries' > out.job.libraries.txt
rm out.requests.txt

pypi/test.toml
@@ -0,0 +1,5 @@
# We run this test only locally for now because we need to figure out how to do
# bundle destroy on script.cleanup first.
LocalOnly = true
RecordRequests = true