mirror of https://github.com/databricks/cli.git
Compare commits
No commits in common. "a08b59d4dd21ab5b7303078a4510c6bdefa6a062" and "b6fcc1d1dbe97574e0b585346ced42dde076f980" have entirely different histories.
a08b59d4dd ... b6fcc1d1db
@@ -2,9 +2,7 @@ package mutator_test
 
 import (
 	"context"
-	"fmt"
 	"runtime"
-	"strings"
 	"testing"
 
 	"github.com/databricks/cli/bundle"
@@ -456,89 +454,52 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
 
 }
 
-func TestApplyPresetsCatalogSchema(t *testing.T) {
-	// Create a bundle in a known mode; development vs. production doesn't matter much here.
-	b := mockBundle(config.Development)
-	// Set the catalog and schema in presets.
-	b.Config.Presets.Catalog = "my_catalog"
-	b.Config.Presets.Schema = "my_schema"
-
-	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, mutator.ApplyPresets())
-	require.NoError(t, diags.Error())
-
-	// Verify that jobs got catalog/schema if they support it.
-	// For DBT tasks in jobs:
-	for _, job := range b.Config.Resources.Jobs {
-		if job.JobSettings != nil && job.Tasks != nil {
-			for _, task := range job.Tasks {
-				if task.DbtTask != nil {
-					require.Equal(t, "my_catalog", task.DbtTask.Catalog, "dbt catalog should be set")
-					require.Equal(t, "my_schema", task.DbtTask.Schema, "dbt schema should be set")
-				}
-			}
-		}
-	}
-
-	// Pipelines: catalog/schema.
-	for _, p := range b.Config.Resources.Pipelines {
-		if p.PipelineSpec != nil {
-			if p.Catalog == "" || p.Catalog == "hive_metastore" {
-				require.Equal(t, "my_catalog", p.Catalog, "pipeline catalog should be set")
-			}
-			require.Equal(t, "my_schema", p.Target, "pipeline schema (target) should be set")
-		}
-	}
-
-	// Registered models: catalog/schema.
-	for _, rm := range b.Config.Resources.RegisteredModels {
-		if rm.CreateRegisteredModelRequest != nil {
-			require.Equal(t, "my_catalog", rm.CatalogName, "registered model catalog should be set")
-			require.Equal(t, "my_schema", rm.SchemaName, "registered model schema should be set")
-		}
-	}
-
-	// Quality monitors: the mutator prepends catalog/schema to tableName unless
-	// it was already fully qualified. Verify that here:
-	for _, qm := range b.Config.Resources.QualityMonitors {
-		// If not fully qualified (3 parts), it should have been rewritten.
-		parts := strings.Split(qm.TableName, ".")
-		if len(parts) != 3 {
-			require.Equal(t, fmt.Sprintf("my_catalog.my_schema.%s", parts[0]), qm.TableName, "quality monitor tableName should include catalog and schema")
-		}
-	}
-
-	// Schemas: the schema preset may replace the catalog or schema name.
-	for _, s := range b.Config.Resources.Schemas {
-		if s.CreateSchema != nil {
-			// If the catalog was empty before, it should now be set:
-			require.Equal(t, "my_catalog", s.CatalogName, "schema catalog should be set")
-			// Schema naming is handled carefully: if the preset schema is non-empty and
-			// s.Name is empty, s.Name is set from the preset. Since s.Name was originally
-			// "schema1", it should remain "schema1" with any prefix applied. If this
-			// logic changes, verify the new behavior explicitly here.
-		}
-	}
-
-	// Model serving endpoints don't support catalog/schema presets; ApplyPresets
-	// returns a diag error when they are present and catalog/schema are set, so
-	// verify that diagnostic here:
-	foundEndpointError := false
-	for _, d := range diags {
-		if strings.Contains(d.Summary, "model serving endpoints are not supported with catalog/schema presets") {
-			foundEndpointError = true
-			break
-		}
-	}
-	require.True(t, foundEndpointError, "should have diag error for model serving endpoints")
-
-	// Add assertions for any other resources that support catalog/schema if needed.
-	// This list is maintained manually: if you add new resource types that support
-	// catalog/schema, add them here as well.
-}
+// func TestApplyPresetsRecommendsCatalogSchemaUsage(t *testing.T) {
+// 	dir := t.TempDir()
+// 	...
+//
+// 	b := &bundle.Bundle{
+// 		Config: config.Root{
+// 			Resources: config.Resources{
+// 				Jobs: map[string]*resources.Job{
+// 					"job1": {
+// 						JobSettings: &jobs.JobSettings{
+// 							Tasks: []jobs.Task{
+// 								{
+// 									NotebookTask: &jobs.NotebookTask{
+// 										NotebookPath: notebookPath,
+// 									},
+// 								},
+// 								{
+// 									SparkPythonTask: &jobs.SparkPythonTask{
+// 										PythonFile: pythonPath,
+// 									},
+// 								},
+// 								{
+// 									NotebookTask: &jobs.NotebookTask{
+// 										NotebookPath: "/Workspace/absolute/path/notebook",
+// 									},
+// 								},
+// 							},
+// 						},
+// 					},
+// 				},
+// 			},
+// 		},
+// 	}
+//
+// 	ctx := context.Background()
+// 	diags := bundle.Apply(ctx, b, ApplyPresets())
+// 	require.Len(t, diags, 2)
+//
+// 	// Check notebook diagnostic
+// 	assert.Equal(t, notebookPath, diags[0].Locations[0].File)
+// 	assert.Equal(t, 1, diags[0].Locations[0].Line)
+// 	assert.Equal(t, 1, diags[0].Locations[0].Column)
+//
+// 	// Check Python script diagnostic
+// 	assert.Equal(t, pythonPath, diags[1].Locations[0].File)
+// 	assert.Equal(t, 1, diags[1].Locations[0].Line)
+// 	assert.Equal(t, 1, diags[1].Locations[0].Column)
+// }
 
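For readers who just want the contract the deleted test pinned down: catalog/schema presets fill in resource fields that are still empty, and a quality monitor's table name is fully qualified when it is not already a three-part name. Below is a minimal, self-contained Go sketch of that behavior; the Presets, DbtTask, and QualityMonitor types and the applyCatalogSchemaPresets helper are hypothetical stand-ins for illustration, not the real bundle/config types or the actual ApplyPresets implementation.

// Illustrative sketch only; not the Databricks CLI's actual implementation.
package main

import (
	"fmt"
	"strings"
)

// Local stand-ins for the bundle's preset and resource types.
type Presets struct {
	Catalog string
	Schema  string
}

type DbtTask struct {
	Catalog string
	Schema  string
}

type QualityMonitor struct {
	TableName string
}

// applyCatalogSchemaPresets mirrors the shape of the deleted assertions:
// empty fields are filled from the preset, set fields are left alone, and
// a monitor's table name is qualified unless it already has three parts.
func applyCatalogSchemaPresets(p Presets, task *DbtTask, qm *QualityMonitor) {
	if task.Catalog == "" {
		task.Catalog = p.Catalog
	}
	if task.Schema == "" {
		task.Schema = p.Schema
	}
	if parts := strings.Split(qm.TableName, "."); len(parts) != 3 {
		qm.TableName = fmt.Sprintf("%s.%s.%s", p.Catalog, p.Schema, qm.TableName)
	}
}

func main() {
	task := &DbtTask{}
	qm := &QualityMonitor{TableName: "events"}
	applyCatalogSchemaPresets(Presets{Catalog: "my_catalog", Schema: "my_schema"}, task, qm)
	fmt.Println(task.Catalog, task.Schema, qm.TableName)
	// Output: my_catalog my_schema my_catalog.my_schema.events
}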
@@ -32,7 +32,6 @@
     "sys.path.append('../src')\n",
     "from {{.project_name}} import main\n",
     "\n",
-{{- /* We can use the short form here without 'dbutils.text()' since the widgets are defined in the metadata below. */}}
     "catalog = dbutils.widgets.get('catalog')\n",
     "schema = dbutils.widgets.get('schema')\n",
     "spark.sql(f'USE {catalog}.{schema}')\n",
@@ -24,7 +24,6 @@
     "outputs": [],
     "source": [
     "# Load default catalog and schema as widget and set their values as the default catalog / schema\n",
-{{- /* We can use the short form here without 'dbutils.text()' since the widgets are defined in the metadata below. */}}
     "catalog = dbutils.widgets.get('catalog')\n",
     "schema = dbutils.widgets.get('schema')\n",
     "spark.sql(f'USE {catalog}.{schema}')"
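Both template hunks above drop a Go text/template comment from .ipynb templates. Template comments never reach the rendered notebook, so the deletion only cleans up the template source. A tiny standalone Go demo (not from the repository) of that behavior:

package main

import (
	"os"
	"text/template"
)

func main() {
	const tmpl = "line one\n" +
		"{{- /* this comment never appears in the rendered output */}}\n" +
		"line two\n"
	t := template.Must(template.New("demo").Parse(tmpl))
	// Prints "line one" and "line two" only; the comment line is gone.
	_ = t.Execute(os.Stdout, nil)
}

The leading dash in `{{-` also trims the newline before the comment, so the commented line vanishes from the output without leaving a blank line behind.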