mirror of https://github.com/databricks/cli.git
Add sub-extension to resource files in built-in templates (#1777)
## Changes We want to encourage a pattern of only specifying a single resource in a YAML file when an `.<resource-type>.yml` (like `.job.yml`) is used. This convention could allow us to bijectively map a resource YAML file to its corresponding resource in the Databricks workspace. This PR simply makes the built-in templates compliant with this format. ## Tests Existing tests.
This commit is contained in:
parent
b3a3071086
commit
a4ba0bbe9f
|
@ -121,7 +121,7 @@ You can find that job by opening your workpace and clicking on **Workflows**.
|
|||
|
||||
You can also deploy to your production target directly from the command-line.
|
||||
The warehouse, catalog, and schema for that target are configured in databricks.yml.
|
||||
When deploying to this target, note that the default job at resources/{{.project_name}}_job.yml
|
||||
When deploying to this target, note that the default job at resources/{{.project_name}}.job.yml
|
||||
has a schedule set that runs every day. The schedule is paused when deploying in development mode
|
||||
(see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).
|
||||
|
||||
|
|
|
@ -18,7 +18,7 @@ This file only template directives; it is skipped for the actual output.
|
|||
|
||||
{{if $notDLT}}
|
||||
{{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}}
|
||||
{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}}
|
||||
{{skip "{{.project_name}}/resources/{{.project_name}}.pipeline.yml"}}
|
||||
{{end}}
|
||||
|
||||
{{if $notNotebook}}
|
||||
|
@ -26,7 +26,7 @@ This file only template directives; it is skipped for the actual output.
|
|||
{{end}}
|
||||
|
||||
{{if (and $notDLT $notNotebook $notPython)}}
|
||||
{{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}}
|
||||
{{skip "{{.project_name}}/resources/{{.project_name}}.job.yml"}}
|
||||
{{else}}
|
||||
{{skip "{{.project_name}}/resources/.gitkeep"}}
|
||||
{{end}}
|
||||
|
|
|
@ -29,7 +29,7 @@ The '{{.project_name}}' project was generated by using the default-python templa
|
|||
```
|
||||
|
||||
Note that the default job from the template has a schedule that runs every day
|
||||
(defined in resources/{{.project_name}}_job.yml). The schedule
|
||||
(defined in resources/{{.project_name}}.job.yml). The schedule
|
||||
is paused when deploying in development mode (see
|
||||
https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).
|
||||
|
||||
|
|
|
@ -40,7 +40,7 @@ resources:
|
|||
- task_key: notebook_task
|
||||
{{- end}}
|
||||
pipeline_task:
|
||||
{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}}
|
||||
{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}.pipeline.yml */}}
|
||||
pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id}
|
||||
{{end -}}
|
||||
{{- if (eq .include_python "yes") }}
|
|
@ -14,7 +14,7 @@
|
|||
"source": [
|
||||
"# DLT pipeline\n",
|
||||
"\n",
|
||||
"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml."
|
||||
"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}.pipeline.yml."
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
"source": [
|
||||
"# Default notebook\n",
|
||||
"\n",
|
||||
"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml."
|
||||
"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}.job.yml."
|
||||
]
|
||||
},
|
||||
{
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
|
||||
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
|
||||
|
||||
USE CATALOG {{"{{"}}catalog{{"}}"}};
|
||||
USE IDENTIFIER({{"{{"}}schema{{"}}"}});
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
|
||||
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
|
||||
--
|
||||
-- The streaming table below ingests all JSON files in /databricks-datasets/retail-org/sales_orders/
|
||||
-- See also https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-create-streaming-table.html
|
||||
|
|
Loading…
Reference in New Issue