mirror of https://github.com/databricks/cli.git
# acc: Simplify integration_whl tests (#2479)

## Changes

Instead of full templates, only template databricks.yml and use envsubst for that.

parent 036061f8b5 · commit b2c87ae5d0
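
Rather than rendering every fixture through the Go templating of `bundle init`, the tests now keep static files and run a template step only for `databricks.yml`, using `envsubst` to expand environment variables such as `$UNIQUE_NAME` and `$NODE_TYPE_ID`. A minimal sketch of that substitution step (the variable values below are illustrative, not the ones the CI harness exports):

```sh
# Illustrative values; in the real tests these come from the harness environment.
export UNIQUE_NAME=abc123
export DEFAULT_SPARK_VERSION=13.3.x-snapshot-scala2.12
export NODE_TYPE_ID=i3.xlarge
export TEST_INSTANCE_POOL_ID=pool-0123
export EXTRA_CONFIG=empty.yml

# envsubst (GNU gettext) rewrites $VAR / ${VAR} references to their values.
# References that are not valid shell variable names, e.g. ${bundle.target},
# are left untouched, which is presumably why bundle interpolation still
# works in the generated file.
envsubst < databricks.yml.tmpl > databricks.yml
```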

```diff
@@ -2,25 +2,23 @@ bundle:
   name: wheel-task
 
 workspace:
-  root_path: "~/.bundle/{{.unique_id}}"
+  root_path: "~/.bundle/$UNIQUE_NAME"
 
-{{if .python_wheel_wrapper}}
-experimental:
-  python_wheel_wrapper: true
-{{end}}
+include:
+  - $EXTRA_CONFIG
 
 resources:
   jobs:
     some_other_job:
-      name: "[${bundle.target}] Test Wheel Job {{.unique_id}}"
+      name: "[${bundle.target}] Test Wheel Job $UNIQUE_NAME"
       tasks:
         - task_key: TestTask
           new_cluster:
             num_workers: 1
-            spark_version: "{{.spark_version}}"
-            node_type_id: "{{.node_type_id}}"
+            spark_version: $DEFAULT_SPARK_VERSION
+            node_type_id: $NODE_TYPE_ID
             data_security_mode: USER_ISOLATION
-            instance_pool_id: "{{.instance_pool_id}}"
+            instance_pool_id: $TEST_INSTANCE_POOL_ID
           python_wheel_task:
             package_name: my_test_code
             entry_point: run
```

```diff
@@ -1,29 +0,0 @@
-{
-  "properties": {
-    "project_name": {
-      "type": "string",
-      "default": "my_test_code",
-      "description": "Unique name for this project"
-    },
-    "spark_version": {
-      "type": "string",
-      "description": "Spark version used for job cluster"
-    },
-    "node_type_id": {
-      "type": "string",
-      "description": "Node type id for job cluster"
-    },
-    "unique_id": {
-      "type": "string",
-      "description": "Unique ID for job name"
-    },
-    "python_wheel_wrapper": {
-      "type": "boolean",
-      "description": "Whether or not to enable python wheel wrapper"
-    },
-    "instance_pool_id": {
-      "type": "string",
-      "description": "Instance pool id for job cluster"
-    }
-  }
-}
```

```diff
@@ -1,15 +1,4 @@
-
->>> cat input.json
-{
-  "project_name": "my_test_code",
-  "spark_version": "13.3.x-snapshot-scala2.12",
-  "node_type_id": "[NODE_TYPE_ID]",
-  "unique_id": "[UNIQUE_NAME]",
-  "python_wheel_wrapper": false,
-  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
-}
-✨ Successfully initialized template
 
 >>> cat databricks.yml
 bundle:
   name: wheel-task
@@ -17,7 +6,8 @@ bundle:
 workspace:
   root_path: "~/.bundle/[UNIQUE_NAME]"
 
-
+include:
+  - empty.yml
 
 resources:
   jobs:
@@ -27,10 +17,10 @@ resources:
         - task_key: TestTask
           new_cluster:
             num_workers: 1
-            spark_version: "13.3.x-snapshot-scala2.12"
-            node_type_id: "[NODE_TYPE_ID]"
+            spark_version: 13.3.x-snapshot-scala2.12
+            node_type_id: [NODE_TYPE_ID]
             data_security_mode: USER_ISOLATION
-            instance_pool_id: "[TEST_INSTANCE_POOL_ID]"
+            instance_pool_id: [TEST_INSTANCE_POOL_ID]
           python_wheel_task:
             package_name: my_test_code
             entry_point: run
```

```diff
@@ -0,0 +1,2 @@
+experimental:
+  python_wheel_wrapper: true
```

```diff
@@ -1,8 +1,5 @@
-export SPARK_VERSION=$DEFAULT_SPARK_VERSION
-export PYTHON_WHEEL_WRAPPER=false
-envsubst < input.json.tmpl > input.json
-trace cat input.json
-$CLI bundle init . --config-file input.json
+export EXTRA_CONFIG=empty.yml
+envsubst < databricks.yml.tmpl > databricks.yml
 trace cat databricks.yml
 trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
 trace $CLI bundle deploy
```
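
Two harness details in the script above are worth noting: `trace` echoes a command (as the `>>>` lines in output.txt) before running it, and the `trap ... EXIT` line registers cleanup that fires on every exit path, so a failed deploy still tears down whatever was created. A rough sketch of that cleanup idiom in plain shell, assuming `$CLI` points at the databricks binary under test:

```sh
#!/bin/sh
# Sketch of the cleanup idiom: an EXIT trap runs on success, failure, or
# interrupt, so the deployed bundle is always destroyed afterwards.
CLI=databricks   # assumption: path to the CLI binary being tested
trap '"$CLI" bundle destroy --auto-approve' EXIT
"$CLI" bundle deploy
```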

```diff
@@ -0,0 +1,15 @@
+from setuptools import setup, find_packages
+
+import my_test_code
+
+setup(
+    name="my_test_code",
+    version=my_test_code.__version__,
+    author=my_test_code.__author__,
+    url="https://databricks.com",
+    author_email="john.doe@databricks.com",
+    description="my example wheel",
+    packages=find_packages(include=["my_test_code"]),
+    entry_points={"group1": "run=my_test_code.__main__:main"},
+    install_requires=["setuptools"],
+)
```

```diff
@@ -1,15 +0,0 @@
-from setuptools import setup, find_packages
-
-import {{.project_name}}
-
-setup(
-    name="{{.project_name}}",
-    version={{.project_name}}.__version__,
-    author={{.project_name}}.__author__,
-    url="https://databricks.com",
-    author_email="john.doe@databricks.com",
-    description="my example wheel",
-    packages=find_packages(include=["{{.project_name}}"]),
-    entry_points={"group1": "run={{.project_name}}.__main__:main"},
-    install_requires=["setuptools"],
-)
```

```diff
@@ -1,15 +1,4 @@
-
->>> cat input.json
-{
-  "project_name": "my_test_code",
-  "spark_version": "13.3.x-snapshot-scala2.12",
-  "node_type_id": "[NODE_TYPE_ID]",
-  "unique_id": "[UNIQUE_NAME]",
-  "python_wheel_wrapper": false,
-  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
-}
-✨ Successfully initialized template
 
 >>> cat databricks.yml
 bundle:
   name: wheel-task
@@ -17,7 +6,8 @@ bundle:
 workspace:
   root_path: "~/.bundle/[UNIQUE_NAME]"
 
-
+include:
+  - empty.yml
 
 resources:
   jobs:
@@ -27,10 +17,10 @@ resources:
         - task_key: TestTask
           new_cluster:
             num_workers: 1
-            spark_version: "13.3.x-snapshot-scala2.12"
-            node_type_id: "[NODE_TYPE_ID]"
+            spark_version: 13.3.x-snapshot-scala2.12
+            node_type_id: [NODE_TYPE_ID]
             data_security_mode: USER_ISOLATION
-            instance_pool_id: "[TEST_INSTANCE_POOL_ID]"
+            instance_pool_id: [TEST_INSTANCE_POOL_ID]
           python_wheel_task:
             package_name: my_test_code
             entry_point: run
```

```diff
@@ -1,8 +1,6 @@
-export SPARK_VERSION=$DEFAULT_SPARK_VERSION
-export PYTHON_WHEEL_WRAPPER=false
-envsubst < $TESTDIR/../base/input.json.tmpl > input.json
-trace cat input.json
-$CLI bundle init $TESTDIR/../base --config-file input.json
+export EXTRA_CONFIG=empty.yml
+envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml
+cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} .
 trace cat databricks.yml
 trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
 trace $CLI bundle deploy
```
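
The `cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} .` line relies on bash performing brace expansion before parameter expansion: the braces first split into three separate words, and `$EXTRA_CONFIG` is then resolved inside the first one. A standalone illustration:

```sh
# Brace expansion happens before parameter expansion in bash, so the variable
# reference survives into its own word and is expanded afterwards.
EXTRA_CONFIG=empty.yml
echo base/{$EXTRA_CONFIG,setup.py,my_test_code}
# prints: base/empty.yml base/setup.py base/my_test_code
```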

```diff
@@ -2,20 +2,20 @@ bundle:
   name: wheel-task
 
 workspace:
-  root_path: "~/.bundle/{{.unique_id}}"
+  root_path: "~/.bundle/$UNIQUE_NAME"
 
 resources:
   clusters:
     test_cluster:
-      cluster_name: "test-cluster-{{.unique_id}}"
-      spark_version: "{{.spark_version}}"
-      node_type_id: "{{.node_type_id}}"
+      cluster_name: "test-cluster-$UNIQUE_NAME"
+      spark_version: "$DEFAULT_SPARK_VERSION"
+      node_type_id: "$NODE_TYPE_ID"
       num_workers: 1
       data_security_mode: USER_ISOLATION
 
   jobs:
     some_other_job:
-      name: "[${bundle.target}] Test Wheel Job {{.unique_id}}"
+      name: "[${bundle.target}] Test Wheel Job $UNIQUE_NAME"
       tasks:
         - task_key: TestTask
           existing_cluster_id: "${resources.clusters.test_cluster.cluster_id}"
```

```diff
@@ -1,25 +0,0 @@
-{
-  "properties": {
-    "project_name": {
-      "type": "string",
-      "default": "my_test_code",
-      "description": "Unique name for this project"
-    },
-    "spark_version": {
-      "type": "string",
-      "description": "Spark version used for job cluster"
-    },
-    "node_type_id": {
-      "type": "string",
-      "description": "Node type id for job cluster"
-    },
-    "unique_id": {
-      "type": "string",
-      "description": "Unique ID for job name"
-    },
-    "instance_pool_id": {
-      "type": "string",
-      "description": "Instance pool id for job cluster"
-    }
-  }
-}
```

```diff
@@ -1,15 +1,4 @@
-
->>> cat input.json
-{
-  "project_name": "my_test_code",
-  "spark_version": "13.3.x-snapshot-scala2.12",
-  "node_type_id": "[NODE_TYPE_ID]",
-  "unique_id": "[UNIQUE_NAME]",
-  "python_wheel_wrapper": false,
-  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
-}
-✨ Successfully initialized template
 
 >>> cat databricks.yml
 bundle:
   name: wheel-task
```

```diff
@@ -1,8 +1,4 @@
-export SPARK_VERSION=$DEFAULT_SPARK_VERSION
-export PYTHON_WHEEL_WRAPPER=false
-envsubst < $TESTDIR/../base/input.json.tmpl > input.json
-trace cat input.json
-$CLI bundle init . --config-file input.json
+envsubst < databricks.yml.tmpl > databricks.yml
 trace cat databricks.yml
 trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
 trace $CLI bundle deploy
```

```diff
@@ -0,0 +1,15 @@
+from setuptools import setup, find_packages
+
+import my_test_code
+
+setup(
+    name="my_test_code",
+    version=my_test_code.__version__,
+    author=my_test_code.__author__,
+    url="https://databricks.com",
+    author_email="john.doe@databricks.com",
+    description="my example wheel",
+    packages=find_packages(include=["my_test_code"]),
+    entry_points={"group1": "run=my_test_code.__main__:main"},
+    install_requires=["setuptools"],
+)
```

```diff
@@ -1,15 +0,0 @@
-from setuptools import setup, find_packages
-
-import {{.project_name}}
-
-setup(
-    name="{{.project_name}}",
-    version={{.project_name}}.__version__,
-    author={{.project_name}}.__author__,
-    url="https://databricks.com",
-    author_email="john.doe@databricks.com",
-    description="my example wheel",
-    packages=find_packages(include=["{{.project_name}}"]),
-    entry_points={"group1": "run={{.project_name}}.__main__:main"},
-    install_requires=["setuptools"],
-)
```

```diff
@@ -7,8 +7,9 @@ Ignore = [
   "my_test_code",
   "my_test_code.egg-info",
   "setup.py",
-  "input.json",
   "databricks.yml",
+  "python_wheel_wrapper.yml",
+  "empty.yml",
 ]
 
 [[Repls]]
```

```diff
@@ -1,15 +1,4 @@
-
->>> cat input.json
-{
-  "project_name": "my_test_code",
-  "spark_version": "12.2.x-scala2.12",
-  "node_type_id": "[NODE_TYPE_ID]",
-  "unique_id": "[UNIQUE_NAME]",
-  "python_wheel_wrapper": true,
-  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
-}
-✨ Successfully initialized template
 
 >>> cat databricks.yml
 bundle:
   name: wheel-task
@@ -17,10 +6,8 @@ bundle:
 workspace:
   root_path: "~/.bundle/[UNIQUE_NAME]"
 
-
-experimental:
-  python_wheel_wrapper: true
-
+include:
+  - python_wheel_wrapper.yml
 
 resources:
   jobs:
@@ -30,10 +17,10 @@ resources:
         - task_key: TestTask
           new_cluster:
             num_workers: 1
-            spark_version: "12.2.x-scala2.12"
-            node_type_id: "[NODE_TYPE_ID]"
+            spark_version: 12.2.x-scala2.12
+            node_type_id: [NODE_TYPE_ID]
             data_security_mode: USER_ISOLATION
-            instance_pool_id: "[TEST_INSTANCE_POOL_ID]"
+            instance_pool_id: [TEST_INSTANCE_POOL_ID]
           python_wheel_task:
             package_name: my_test_code
             entry_point: run
```

```diff
@@ -2,11 +2,10 @@
 # But before users used older DBRs and python wheel tasks but installed it from DBFS.
 # We still want to support older DBRs and did the trampoline workaround (https://github.com/databricks/cli/pull/635)
 # Hence this is to test that python wheel tasks in DABs are working for older DBRs
-export SPARK_VERSION=12.2.x-scala2.12
-export PYTHON_WHEEL_WRAPPER=true
-envsubst < $TESTDIR/../base/input.json.tmpl > input.json
-trace cat input.json
-$CLI bundle init $TESTDIR/../base --config-file input.json
+export DEFAULT_SPARK_VERSION=12.2.x-scala2.12
+export EXTRA_CONFIG=python_wheel_wrapper.yml
+envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml
+cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} .
 trace cat databricks.yml
 trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
 trace $CLI bundle deploy
```

```diff
@@ -1,15 +1,4 @@
-
->>> cat input.json
-{
-  "project_name": "my_test_code",
-  "spark_version": "12.2.x-scala2.12",
-  "node_type_id": "[NODE_TYPE_ID]",
-  "unique_id": "[UNIQUE_NAME]",
-  "python_wheel_wrapper": true,
-  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
-}
-✨ Successfully initialized template
 
 >>> cat databricks.yml
 bundle:
   name: wheel-task
@@ -17,10 +6,8 @@ bundle:
 workspace:
   root_path: "~/.bundle/[UNIQUE_NAME]"
 
-
-experimental:
-  python_wheel_wrapper: true
-
+include:
+  - python_wheel_wrapper.yml
 
 resources:
   jobs:
@@ -30,10 +17,10 @@ resources:
         - task_key: TestTask
           new_cluster:
             num_workers: 1
-            spark_version: "12.2.x-scala2.12"
-            node_type_id: "[NODE_TYPE_ID]"
+            spark_version: 12.2.x-scala2.12
+            node_type_id: [NODE_TYPE_ID]
             data_security_mode: USER_ISOLATION
-            instance_pool_id: "[TEST_INSTANCE_POOL_ID]"
+            instance_pool_id: [TEST_INSTANCE_POOL_ID]
           python_wheel_task:
             package_name: my_test_code
             entry_point: run
```

```diff
@@ -1,8 +1,7 @@
-export SPARK_VERSION=12.2.x-scala2.12
-export PYTHON_WHEEL_WRAPPER=true
-envsubst < $TESTDIR/../base/input.json.tmpl > input.json
-trace cat input.json
-$CLI bundle init $TESTDIR/../base --config-file input.json
+export DEFAULT_SPARK_VERSION=12.2.x-scala2.12
+export EXTRA_CONFIG=python_wheel_wrapper.yml
+envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml
+cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} .
 trace cat databricks.yml
 trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
 trace $CLI bundle deploy
```