mirror of https://github.com/databricks/cli.git
Minor default template tweaks (#758)
Minor template tweaks, mostly making the imports section for DLT notebooks a bit more elegant. Tested with DAB deployment + in-workspace UI.
commit 9e56bed593
parent d9a276b17d

@@ -17,7 +17,7 @@ This file only contains template directives; it is skipped for the actual output.
 {{if $notPython}}
   {{skip "{{.project_name}}/src/{{.project_name}}"}}
-  {{skip "{{.project_name}}/tests/test_main.py"}}
+  {{skip "{{.project_name}}/tests/main_test.py"}}
   {{skip "{{.project_name}}/setup.py"}}
   {{skip "{{.project_name}}/pytest.ini"}}
 {{end}}

@@ -20,7 +20,7 @@ The '{{.project_name}}' project was generated by using the default-python template
    This deploys everything that's defined for this project.
    For example, the default template would deploy a job called
-   `[dev yourname] {{.project_name}}-job` to your workspace.
+   `[dev yourname] {{.project_name}}_job` to your workspace.
    You can find that job by opening your workspace and clicking on **Workflows**.

 4. Similarly, to deploy a production copy, type:

@@ -17,7 +17,7 @@ def get_absolute_path(*relative_parts):
     if 'dbutils' in globals():
         base_dir = os.path.dirname(dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()) # type: ignore
         path = os.path.normpath(os.path.join(base_dir, *relative_parts))
-        return path if path.startswith("/Workspace") else os.path.join("/Workspace", path)
+        return path if path.startswith("/Workspace") else "/Workspace" + path
     else:
         return os.path.join(*relative_parts)
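
The join-versus-concatenation change above fixes a subtle bug: when its second argument is already an absolute path, os.path.join discards every component before it, so the old expression silently dropped the /Workspace prefix. A minimal sketch of the difference (the notebook path here is hypothetical):

    import os

    path = "/Repos/someone/project"  # hypothetical absolute notebook path
    # os.path.join discards components that precede an absolute path:
    print(os.path.join("/Workspace", path))  # -> /Repos/someone/project
    # Plain concatenation keeps the prefix:
    print("/Workspace" + path)               # -> /Workspace/Repos/someone/project
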
@@ -2,11 +2,11 @@
 resources:
   pipelines:
     {{.project_name}}_pipeline:
-      name: "{{.project_name}}_pipeline"
-      target: "{{.project_name}}_${bundle.environment}"
+      name: {{.project_name}}_pipeline
+      target: {{.project_name}}_${bundle.environment}
       libraries:
         - notebook:
             path: ../src/dlt_pipeline.ipynb

       configuration:
-        "bundle.sourcePath": "/Workspace/${workspace.file_path}/src"
+        bundle.sourcePath: /Workspace/${workspace.file_path}/src
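
Dropping the quotes in this resource file is purely cosmetic: plain YAML keys and scalars like these parse to the same strings either way. A quick check of that claim, assuming PyYAML is installed (the pipeline name is a stand-in):

    import yaml  # assumes PyYAML (pip install pyyaml)

    # Quoted and unquoted forms parse to identical mappings, so the
    # rendered bundle configuration does not change.
    assert yaml.safe_load('name: my_pipeline') == yaml.safe_load('name: "my_pipeline"')
    assert yaml.safe_load('bundle.sourcePath: /Workspace/src') == \
           yaml.safe_load('"bundle.sourcePath": "/Workspace/src"')
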
@@ -14,7 +14,7 @@
    "source": [
     "# DLT pipeline\n",
     "\n",
-    "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.my_project}}_pipeline.yml."
+    "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml."
    ]
   },
   {

@@ -27,28 +27,18 @@
      "nuid": "9198e987-5606-403d-9f6d-8f14e6a4017f",
      "showTitle": false,
      "title": ""
     },
+    "jupyter": {
+{{- /* Collapse this cell by default. Just boring imports here! */}}
+     "source_hidden": true
+    }
    },
    "outputs": [],
    "source": [
 {{- if (eq .include_python "yes") }}
-    "# Import DLT and make sure 'my_project' is on the Python path\n",
+    "# Import DLT and src/{{.project_name}}\n",
     "import dlt\n",
-    "from pyspark.sql.functions import expr\n",
-    "from pyspark.sql import SparkSession\n",
-    "spark = SparkSession.builder.getOrCreate()\n",
     "import sys\n",
-    "try:\n",
-    "  sys.path.append(spark.conf.get(\"bundle.sourcePath\"))\n",
-    "except:\n",
-    "  pass\n",
-    "from my_project import main"
+    "sys.path.append(spark.conf.get(\"bundle.sourcePath\", \".\"))\n",
+    "from pyspark.sql.functions import expr\n",
+    "from {{.project_name}} import main"
 {{else}}
     "# Import DLT\n",
     "import dlt\n",
     "from pyspark.sql.functions import expr\n",
-    "from pyspark.sql import SparkSession\n",
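
Two details make the new imports cell shorter. Databricks notebooks predefine spark, so the explicit SparkSession construction was unnecessary, and spark.conf.get accepts a default value, which replaces the old try/except. A sketch of the equivalence, assuming a PySpark environment (the getOrCreate() call is only there so the snippet runs standalone):

    import sys
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.getOrCreate()  # predefined in Databricks notebooks

    # Old pattern: spark.conf.get raises when the key is unset, hence try/except.
    try:
        sys.path.append(spark.conf.get("bundle.sourcePath"))
    except Exception:
        pass

    # New pattern: pass a default instead; "." is a harmless fallback when
    # the notebook runs outside a deployed bundle.
    sys.path.append(spark.conf.get("bundle.sourcePath", "."))
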
@@ -14,7 +14,7 @@
    "source": [
     "# Default notebook\n",
     "\n",
-    "This default notebook is executed using Databricks Workflows as defined in resources/{{.my_project}}_job.yml."
+    "This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml."
    ]
   },
   {