fix references to the files

Shreyas Goenka 2024-09-18 11:44:15 +02:00
parent 0b0e0a4156
commit 9b2fe7cfa9
9 changed files with 10 additions and 10 deletions

View File

@@ -110,7 +110,7 @@ func TestPrepareBuiltInTemplatesWithRelativePaths(t *testing.T) {
 func TestBuiltinPythonTemplateValid(t *testing.T) {
 	// Test option combinations
-	options := []string{"yes", "no"}
+	options := []string{"no"}
 	isServicePrincipal := false
 	catalog := "hive_metastore"
 	cachedCatalog = &catalog

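The hunk above narrows the option matrix that the Go test iterates over. A minimal, hypothetical sketch of that loop shape, using a stand-in `validateTemplate` helper rather than the CLI's actual test plumbing:

```go
package template_test

import "testing"

// validateTemplate is a hypothetical stand-in for the real assertions, which
// materialize the built-in template for one option value and check the result.
func validateTemplate(t *testing.T, includePython string) {
	t.Helper()
	if includePython != "yes" && includePython != "no" {
		t.Fatalf("unexpected option value %q", includePython)
	}
}

func TestOptionCombinations(t *testing.T) {
	// Mirrors the loop in TestBuiltinPythonTemplateValid; after this commit
	// only "no" is exercised there.
	options := []string{"yes", "no"}
	for _, opt := range options {
		t.Run("include_python="+opt, func(t *testing.T) {
			validateTemplate(t, opt)
		})
	}
}
```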
View File

@@ -121,7 +121,7 @@ You can find that job by opening your workspace and clicking on **Workflows**.
 You can also deploy to your production target directly from the command-line.
 The warehouse, catalog, and schema for that target are configured in databricks.yml.
-When deploying to this target, note that the default job at resources/{{.project_name}}_job.yml
+When deploying to this target, note that the default job at resources/{{.project_name}}.job.yml
 has a schedule set that runs every day. The schedule is paused when deploying in development mode
 (see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).

View File

@@ -18,7 +18,7 @@ This file contains only template directives; it is skipped for the actual output.
 {{if $notDLT}}
 {{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}}
-{{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}}
+{{skip "{{.project_name}}/resources/{{.project_name}}.pipeline.yml"}}
 {{end}}

 {{if $notNotebook}}
@@ -26,7 +26,7 @@ This file contains only template directives; it is skipped for the actual output.
 {{end}}

 {{if (and $notDLT $notNotebook $notPython)}}
-{{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}}
+{{skip "{{.project_name}}/resources/{{.project_name}}.job.yml"}}
 {{else}}
 {{skip "{{.project_name}}/resources/.gitkeep"}}
 {{end}}

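The `skip` calls above are Go text/template functions provided by the CLI's templating engine; each call marks a glob of output paths to omit from the materialized project. A minimal sketch of how such a helper can be wired up with `template.FuncMap`, assuming a hypothetical engine that merely collects the patterns:

```go
package main

import (
	"fmt"
	"io"
	"text/template"
)

func main() {
	var skipped []string
	funcs := template.FuncMap{
		// "skip" records a path pattern and renders nothing, mirroring how a
		// preamble can mark files for exclusion without emitting output.
		"skip": func(pattern string) string {
			skipped = append(skipped, pattern)
			return ""
		},
	}
	src := `{{if not .IncludeDLT}}{{skip "src/dlt_pipeline.ipynb"}}{{end}}`
	t := template.Must(template.New("preamble").Funcs(funcs).Parse(src))
	if err := t.Execute(io.Discard, map[string]bool{"IncludeDLT": false}); err != nil {
		panic(err)
	}
	fmt.Println(skipped) // [src/dlt_pipeline.ipynb]
}
```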
View File

@@ -29,7 +29,7 @@ The '{{.project_name}}' project was generated by using the default-python template.
 ```
 Note that the default job from the template has a schedule that runs every day
-(defined in resources/{{.project_name}}_job.yml). The schedule
+(defined in resources/{{.project_name}}.job.yml). The schedule
 is paused when deploying in development mode (see
 https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).

View File

@@ -40,7 +40,7 @@ resources:
 - task_key: notebook_task
 {{- end}}
 pipeline_task:
-{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}}
+{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}.pipeline.yml */}}
 pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id}
 {{end -}}
 {{- if (eq .include_python "yes") }}

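The `{{- ... }}` dashes in the hunk above are standard text/template trim markers: they strip the adjacent whitespace and newlines so that skipped branches don't leave blank lines in the rendered YAML. A self-contained illustration:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	src := "tasks:\n  - task_key: notebook_task\n{{- if .IncludeDLT}}\n  - task_key: refresh_pipeline{{end}}\n"
	t := template.Must(template.New("yaml").Parse(src))
	// With IncludeDLT false, the "{{- if" trims the preceding newline, so no
	// stray blank line is left behind in the output:
	//   tasks:
	//     - task_key: notebook_task
	if err := t.Execute(os.Stdout, map[string]bool{"IncludeDLT": false}); err != nil {
		panic(err)
	}
}
```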
View File

@@ -14,7 +14,7 @@
 "source": [
 "# DLT pipeline\n",
 "\n",
-"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml."
+"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}.pipeline.yml."
 ]
 },
 {

View File

@@ -14,7 +14,7 @@
 "source": [
 "# Default notebook\n",
 "\n",
-"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml."
+"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}.job.yml."
 ]
 },
 {

View File

@@ -1,4 +1,4 @@
--- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
+-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
 USE CATALOG {{"{{"}}catalog{{"}}"}};
 USE IDENTIFIER({{"{{"}}schema{{"}}"}});

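The `{{"{{"}}catalog{{"}}"}}` idiom above escapes the braces during template rendering, so the generated SQL file contains a literal `{{catalog}}` parameter marker for Databricks SQL to substitute at query time. A small demonstration:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// The doubled-brace string literals render as "{{" and "}}", so the
	// output is exactly: USE CATALOG {{catalog}};
	src := `USE CATALOG {{"{{"}}catalog{{"}}"}};` + "\n"
	t := template.Must(template.New("q").Parse(src))
	if err := t.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
}
```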
View File

@@ -1,4 +1,4 @@
--- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
+-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml)
 --
 -- The streaming table below ingests all JSON files in /databricks-datasets/retail-org/sales_orders/
 -- See also https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-create-streaming-table.html