Add sub-extension to resource files in built-in templates (#1777)

## Changes
We want to encourage a pattern of specifying only a single resource per
YAML file when the `.<resource-type>.yml` extension (like `.job.yml`) is
used. This convention would allow us to bijectively map a resource YAML
file to its corresponding resource in the Databricks workspace.

This PR simply makes the built-in templates compliant with this format.
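
For illustration, a minimal sketch of a compliant resource file (the file name, resource keys, and paths are hypothetical, not taken from the templates):

```yaml
# Hypothetical resources/my_project.job.yml: exactly one resource per file,
# so the file maps one-to-one to a job in the workspace.
resources:
  jobs:
    my_project_job:
      name: my_project_job
      tasks:
        - task_key: notebook_task
          notebook_task:
            notebook_path: ../src/notebook.ipynb
```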

## Tests
Existing tests.
Author: shreyas-goenka, 2024-09-25 18:28:14 +05:30 (committed by GitHub)
Commit: a4ba0bbe9f (parent: b3a3071086)
11 changed files with 9 additions and 9 deletions


```diff
@@ -121,7 +121,7 @@ You can find that job by opening your workpace and clicking on **Workflows**.
 You can also deploy to your production target directly from the command-line.
 The warehouse, catalog, and schema for that target are configured in databricks.yml.
-When deploying to this target, note that the default job at resources/{{.project_name}}_job.yml
+When deploying to this target, note that the default job at resources/{{.project_name}}.job.yml
 has a schedule set that runs every day. The schedule is paused when deploying in development mode
 (see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).
```


```diff
@@ -18,7 +18,7 @@ This file only template directives; it is skipped for the actual output.
 {{if $notDLT}}
 {{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}}
-{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}}
+{{skip "{{.project_name}}/resources/{{.project_name}}.pipeline.yml"}}
 {{end}}
 {{if $notNotebook}}
@@ -26,7 +26,7 @@ This file only template directives; it is skipped for the actual output.
 {{end}}
 {{if (and $notDLT $notNotebook $notPython)}}
-{{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}}
+{{skip "{{.project_name}}/resources/{{.project_name}}.job.yml"}}
 {{else}}
 {{skip "{{.project_name}}/resources/.gitkeep"}}
 {{end}}
```


````diff
@@ -29,7 +29,7 @@ The '{{.project_name}}' project was generated by using the default-python templa
 ```
 Note that the default job from the template has a schedule that runs every day
-(defined in resources/{{.project_name}}_job.yml). The schedule
+(defined in resources/{{.project_name}}.job.yml). The schedule
 is paused when deploying in development mode (see
 https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).
````


```diff
@@ -40,7 +40,7 @@ resources:
 - task_key: notebook_task
 {{- end}}
 pipeline_task:
-{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}}
+{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}.pipeline.yml */}}
 pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id}
 {{end -}}
 {{- if (eq .include_python "yes") }}
```
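
For context, the `${resources.pipelines...id}` interpolation above is how a job task references a pipeline defined elsewhere in the same bundle. A minimal sketch of the rendered YAML, assuming the project is named `my_project` (the task key and surrounding layout are illustrative):

```yaml
resources:
  jobs:
    my_project_job:
      tasks:
        - task_key: refresh_pipeline
          pipeline_task:
            # Resolves at deploy time to the ID of the pipeline
            # declared in resources/my_project.pipeline.yml.
            pipeline_id: ${resources.pipelines.my_project_pipeline.id}
```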


```diff
@@ -14,7 +14,7 @@
 "source": [
 "# DLT pipeline\n",
 "\n",
-"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml."
+"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}.pipeline.yml."
 ]
 },
 {
```


```diff
@@ -14,7 +14,7 @@
 "source": [
 "# Default notebook\n",
 "\n",
-"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml."
+"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}.job.yml."
 ]
 },
 {
```


```diff
@@ -1,4 +1,4 @@
--- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
+-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
 USE CATALOG {{"{{"}}catalog{{"}}"}};
 USE IDENTIFIER({{"{{"}}schema{{"}}"}});
```
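
Note that `{{"{{"}}...{{"}}"}}` is a Go-template escape: after the template is rendered, it leaves literal `{{catalog}}` and `{{schema}}` parameter markers in the generated SQL file. A sketch of the rendered output, assuming the project is named `my_project`:

```sql
-- This query is executed using Databricks Workflows (see resources/my_project_sql.job.yml)
USE CATALOG {{catalog}};
USE IDENTIFIER({{schema}});
```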


```diff
@@ -1,4 +1,4 @@
--- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
+-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
 --
 -- The streaming table below ingests all JSON files in /databricks-datasets/retail-org/sales_orders/
 -- See also https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-create-streaming-table.html
```