Format default-python template (#2110)

## Changes
Format code in default-python template, so it's already pre-formatted.

## Tests

```
$ databricks bundle init libs/template/templates/default-python
$ ruff format --diff my_project     
6 files already formatted
```
This commit is contained in:
Gleb Kanterov 2025-01-15 10:40:29 +01:00 committed by GitHub
parent 55494a0bda
commit 25f8ee8d66
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 30 additions and 19 deletions

View File

@@ -29,7 +29,8 @@
"source": [
{{- if (eq .include_python "yes") }}
"import sys\n",
"sys.path.append('../src')\n",
"\n",
"sys.path.append(\"../src\")\n",
"from {{.project_name}} import main\n",
"\n",
"main.get_taxis(spark).show(10)"

View File

@@ -5,28 +5,32 @@ This file is primarily used by the setuptools library and typically should not
be executed directly. See README.md for how to deploy, test, and run
the {{.project_name}} project.
"""
from setuptools import setup, find_packages
import sys
sys.path.append('./src')
sys.path.append("./src")
import datetime
import {{.project_name}}
local_version = datetime.datetime.utcnow().strftime("%Y%m%d.%H%M%S")
setup(
name="{{.project_name}}",
# We use timestamp as Local version identifier (https://peps.python.org/pep-0440/#local-version-identifiers.)
# to ensure that changes to wheel package are picked up when used on all-purpose clusters
version={{.project_name}}.__version__ + "+" + datetime.datetime.utcnow().strftime("%Y%m%d.%H%M%S"),
version={{.project_name}}.__version__ + "+" + local_version,
url="https://databricks.com",
author="{{user_name}}",
description="wheel file based on {{.project_name}}/src",
packages=find_packages(where='./src'),
package_dir={'': 'src'},
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main={{.project_name}}.main:main"
]
"main={{.project_name}}.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.

View File

@@ -35,6 +35,7 @@
"# Import DLT and src/{{.project_name}}\n",
"import dlt\n",
"import sys\n",
"\n",
"sys.path.append(spark.conf.get(\"bundle.sourcePath\", \".\"))\n",
"from pyspark.sql.functions import expr\n",
"from {{.project_name}} import main"
@@ -71,6 +72,7 @@
" return spark.read.format(\"json\").load(\"/databricks-datasets/nyctaxi/sample/json/\")\n",
{{end -}}
"\n",
"\n",
"@dlt.table\n",
"def filtered_taxis():\n",
" return dlt.read(\"taxi_raw\").filter(expr(\"fare_amount < 30\"))"

View File

@@ -1,5 +1,6 @@
from pyspark.sql import SparkSession, DataFrame
def get_taxis(spark: SparkSession) -> DataFrame:
return spark.read.table("samples.nyctaxi.trips")
@@ -10,12 +11,15 @@ def get_taxis(spark: SparkSession) -> DataFrame:
def get_spark() -> SparkSession:
try:
from databricks.connect import DatabricksSession
return DatabricksSession.builder.getOrCreate()
except ImportError:
return SparkSession.builder.getOrCreate()
def main():
get_taxis(get_spark()).show(5)
if __name__ == '__main__':
if __name__ == "__main__":
main()