mirror of https://github.com/databricks/cli.git

Compare commits: 02e83877f4 ... d3e221a116 (5 commits)

Author | SHA1 | Date
---|---|---
dependabot[bot] | d3e221a116 |
dependabot[bot] | 0ef1ada14b |
dependabot[bot] | f19f7fa130 |
dependabot[bot] | 90244f9c16 |
Pieter Noordhuis | b451905b6e |
A single-line file containing a pinned SHA is updated:

```diff
@@ -1 +1 @@
-3eae49b444cac5a0118a3503e5b7ecef7f96527a
+d05898328669a3f8ab0c2ecee37db2673d3ea3f7
```
Library glob expansion now resolves matches against the bundle's sync root path instead of its root path, and stores them relative to that sync root:

```diff
@@ -39,7 +39,7 @@ func getLibDetails(v dyn.Value) (string, string, bool) {
 }
 
 func findMatches(b *bundle.Bundle, path string) ([]string, error) {
-    matches, err := filepath.Glob(filepath.Join(b.RootPath, path))
+    matches, err := filepath.Glob(filepath.Join(b.SyncRootPath, path))
     if err != nil {
         return nil, err
     }
@@ -52,10 +52,10 @@ func findMatches(b *bundle.Bundle, path string) ([]string, error) {
         }
     }
 
-    // We make the matched path relative to the root path before storing it
+    // We make the matched path relative to the sync root path before storing it
     // to allow upload mutator to distinguish between local and remote paths
     for i, match := range matches {
-        matches[i], err = filepath.Rel(b.RootPath, match)
+        matches[i], err = filepath.Rel(b.SyncRootPath, match)
         if err != nil {
             return nil, err
         }
@@ -211,8 +211,8 @@ func (e *expand) Name() string {
 
 // ExpandGlobReferences expands any glob references in the libraries or environments section
 // to corresponding local paths.
-// We only expand local paths (i.e. paths that are relative to the root path).
-// After expanding we make the paths relative to the root path to allow upload mutator later in the chain to
+// We only expand local paths (i.e. paths that are relative to the sync root path).
+// After expanding we make the paths relative to the sync root path to allow upload mutator later in the chain to
 // distinguish between local and remote paths.
 func ExpandGlobReferences() bundle.Mutator {
     return &expand{}
```
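The round trip these hunks set up is: glob inside the sync root, store each match relative to that root, and let the upload step re-anchor it later. Below is a minimal, self-contained sketch of that pattern using only the standard library; the sync root value and function name are illustrative, not the CLI's actual code.

```go
package main

import (
	"fmt"
	"path/filepath"
)

// findRelativeMatches mirrors the shape of findMatches above: glob underneath
// a sync root, then store every hit relative to that root so a later step can
// tell local paths apart from remote ones and re-anchor them as needed.
func findRelativeMatches(syncRoot, pattern string) ([]string, error) {
	matches, err := filepath.Glob(filepath.Join(syncRoot, pattern))
	if err != nil {
		return nil, err
	}
	for i, match := range matches {
		rel, err := filepath.Rel(syncRoot, match)
		if err != nil {
			return nil, err
		}
		matches[i] = rel
	}
	return matches, nil
}

func main() {
	// Hypothetical sync root; in the bundle code this role is played by b.SyncRootPath.
	matches, err := findRelativeMatches("/tmp/bundle", "dist/*.whl")
	if err != nil {
		panic(err)
	}
	// Joining the sync root back onto a stored match reproduces the absolute
	// local path, which is what the upload side does with filepath.Join.
	for _, m := range matches {
		fmt.Println(filepath.Join("/tmp/bundle", m))
	}
}
```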
The glob-expansion tests now set SyncRootPath on the test bundle:

```diff
@@ -23,7 +23,7 @@ func TestGlobReferencesExpandedForTaskLibraries(t *testing.T) {
     testutil.Touch(t, dir, "jar", "my2.jar")
 
     b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
@@ -104,7 +104,7 @@ func TestGlobReferencesExpandedForForeachTaskLibraries(t *testing.T) {
     testutil.Touch(t, dir, "jar", "my2.jar")
 
     b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
@@ -189,7 +189,7 @@ func TestGlobReferencesExpandedForEnvironmentsDeps(t *testing.T) {
     testutil.Touch(t, dir, "jar", "my2.jar")
 
     b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
```
So do the library validation tests:

```diff
@@ -18,7 +18,7 @@ func TestValidateEnvironments(t *testing.T) {
     testutil.Touch(t, tmpDir, "wheel.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
@@ -50,7 +50,7 @@ func TestValidateEnvironmentsNoFile(t *testing.T) {
     tmpDir := t.TempDir()
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
@@ -84,7 +84,7 @@ func TestValidateTaskLibraries(t *testing.T) {
     testutil.Touch(t, tmpDir, "wheel.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
@@ -117,7 +117,7 @@ func TestValidateTaskLibrariesNoFile(t *testing.T) {
     tmpDir := t.TempDir()
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Resources: config.Resources{
                 Jobs: map[string]*resources.Job{
```
Collecting local libraries for upload likewise joins sources against the sync root path:

```diff
@@ -74,7 +74,7 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error
             return v, nil
         }
 
-        source = filepath.Join(b.RootPath, source)
+        source = filepath.Join(b.SyncRootPath, source)
         libs[source] = append(libs[source], configLocation{
             configPath: p,
             location:   v.Location(),
```
As do the artifact upload tests:

```diff
@@ -24,7 +24,7 @@ func TestArtifactUploadForWorkspace(t *testing.T) {
     whlLocalPath := filepath.Join(whlFolder, "source.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Workspace: config.Workspace{
                 ArtifactPath: "/foo/bar/artifacts",
@@ -112,7 +112,7 @@ func TestArtifactUploadForVolumes(t *testing.T) {
     whlLocalPath := filepath.Join(whlFolder, "source.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Workspace: config.Workspace{
                 ArtifactPath: "/Volumes/foo/bar/artifacts",
@@ -200,7 +200,7 @@ func TestArtifactUploadWithNoLibraryReference(t *testing.T) {
     whlLocalPath := filepath.Join(whlFolder, "source.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Workspace: config.Workspace{
                 ArtifactPath: "/Workspace/foo/bar/artifacts",
@@ -240,7 +240,7 @@ func TestUploadMultipleLibraries(t *testing.T) {
     testutil.Touch(t, whlFolder, "source4.whl")
 
     b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
         Config: config.Root{
             Workspace: config.Workspace{
                 ArtifactPath: "/foo/bar/artifacts",
```
The generated bundle schema gains provisioned-throughput descriptions for served models (the same block appears twice in the schema):

```diff
@@ -2046,6 +2046,12 @@
       "instance_profile_arn": {
         "description": "ARN of the instance profile that the served model will use to access AWS resources."
       },
+      "max_provisioned_throughput": {
+        "description": "The maximum tokens per second that the endpoint can scale up to."
+      },
+      "min_provisioned_throughput": {
+        "description": "The minimum tokens per second that the endpoint can scale down to."
+      },
       "model_name": {
         "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n"
       },
@@ -5147,6 +5153,12 @@
       "instance_profile_arn": {
         "description": "ARN of the instance profile that the served model will use to access AWS resources."
       },
+      "max_provisioned_throughput": {
+        "description": "The maximum tokens per second that the endpoint can scale up to."
+      },
+      "min_provisioned_throughput": {
+        "description": "The minimum tokens per second that the endpoint can scale down to."
+      },
       "model_name": {
         "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n"
       },
```
The python-wheel-local test bundle now sets its artifact path in configuration instead of in test code:

```diff
@@ -1,6 +1,9 @@
 bundle:
   name: python-wheel-local
 
+workspace:
+  artifact_path: /foo/bar
+
 resources:
   jobs:
     test_job:
```
The Python wheel build tests are reworked to load each bundle through a loadTarget helper and then apply only the build phase, instead of calling bundle.Load and running the load phase in every test:

```diff
@@ -15,11 +15,10 @@ import (
 )
 
 func TestPythonWheelBuild(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl")
@@ -32,11 +31,10 @@ func TestPythonWheelBuild(t *testing.T) {
 }
 
 func TestPythonWheelBuildAutoDetect(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl")
@@ -49,11 +47,10 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) {
 }
 
 func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_notebook")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl")
@@ -66,11 +63,10 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
 }
 
 func TestPythonWheelWithDBFSLib(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_dbfs_lib")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     match := libraries.ExpandGlobReferences()
@@ -79,11 +75,11 @@ func TestPythonWheelWithDBFSLib(t *testing.T) {
 }
 
 func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_no_setup")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default")
 
-    b.Config.Workspace.ArtifactPath = "/foo/bar"
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
+    require.NoError(t, diags.Error())
 
     mockFiler := mockfiler.NewMockFiler(t)
     mockFiler.EXPECT().Write(
@@ -94,20 +90,20 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
         filer.CreateParentDirectories,
     ).Return(nil)
 
-    u := libraries.UploadWithClient(mockFiler)
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build(), libraries.ExpandGlobReferences(), u))
+    diags = bundle.Apply(ctx, b, bundle.Seq(
+        libraries.ExpandGlobReferences(),
+        libraries.UploadWithClient(mockFiler),
+    ))
     require.NoError(t, diags.Error())
     require.Empty(t, diags)
 
     require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl)
 }
 
 func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/environment_key")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/environment_key", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl")
@@ -120,11 +116,10 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
 }
 
 func TestPythonWheelBuildMultiple(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_multiple")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl")
@@ -137,11 +132,10 @@ func TestPythonWheelBuildMultiple(t *testing.T) {
 }
 
 func TestPythonWheelNoBuild(t *testing.T) {
-    ctx := context.Background()
-    b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_build")
-    require.NoError(t, err)
+    b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default")
 
-    diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build()))
+    ctx := context.Background()
+    diags := bundle.Apply(ctx, b, phases.Build())
     require.NoError(t, diags.Error())
 
     match := libraries.ExpandGlobReferences()
```
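These tests now obtain their bundle from a loadTarget helper and apply only phases.Build. The helper itself is not part of this diff; the sketch below shows one plausible shape for it, built only from the APIs visible above (bundle.Load, bundle.Apply, phases.Load). The real helper also takes a target name ("default"), which is omitted here, and its actual implementation may differ.

```go
// Package name is illustrative; this is a sketch, not the repository's helper.
package bundletests

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/phases"
	"github.com/stretchr/testify/require"
)

// loadForTest is a hypothetical stand-in for the loadTarget helper used in the
// tests above: it loads a bundle from a config directory and runs the load
// phase once, so each test only needs to apply phases.Build afterwards.
// Target selection (the "default" argument) is intentionally left out.
func loadForTest(t *testing.T, path string) *bundle.Bundle {
	ctx := context.Background()

	b, err := bundle.Load(ctx, path)
	require.NoError(t, err)

	diags := bundle.Apply(ctx, b, phases.Load())
	require.NoError(t, diags.Error())
	return b
}
```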
The generated help for listing run artifacts now documents the UC Volumes pagination limitation (from the SDK update):

```diff
@@ -941,7 +941,12 @@ func newListArtifacts() *cobra.Command {
     cmd.Long = `Get all artifacts.
 
   List artifacts for a run. Takes an optional artifact_path prefix. If it is
-  specified, the response contains only artifacts with the specified prefix.",`
+  specified, the response contains only artifacts with the specified prefix.
+  This API does not support pagination when listing artifacts in UC Volumes. A
+  maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+  /api/2.0/fs/directories{directory_path} for listing artifacts in UC Volumes,
+  which supports pagination. See [List directory contents | Files
+  API](/api/workspace/files/listdirectorycontents).`
 
     cmd.Annotations = make(map[string]string)
 
```
Metastore assignment help text and the update-assignment flag description are updated:

```diff
@@ -88,7 +88,9 @@ func newAssign() *cobra.Command {
   Arguments:
     WORKSPACE_ID: A workspace ID.
     METASTORE_ID: The unique ID of the metastore.
-    DEFAULT_CATALOG_NAME: The name of the default catalog in the metastore.`
+    DEFAULT_CATALOG_NAME: The name of the default catalog in the metastore. This field is depracted.
+      Please use "Default Namespace API" to configure the default catalog for a
+      Databricks workspace.`
 
     cmd.Annotations = make(map[string]string)
 
@@ -665,7 +667,7 @@ func newUpdateAssignment() *cobra.Command {
     // TODO: short flags
     cmd.Flags().Var(&updateAssignmentJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
-    cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`)
+    cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog in the metastore.`)
     cmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`)
 
     cmd.Use = "update-assignment WORKSPACE_ID"
```
The permissions commands now list dashboards among the accepted request object types (the same change appears in get, set, and update):

```diff
@@ -117,9 +117,10 @@ func newGet() *cobra.Command {
 
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`
 
     cmd.Annotations = make(map[string]string)
@@ -245,9 +246,10 @@ func newSet() *cobra.Command {
 
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`
 
     cmd.Annotations = make(map[string]string)
@@ -319,9 +321,10 @@ func newUpdate() *cobra.Command {
 
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`
 
     cmd.Annotations = make(map[string]string)
```
A new regenerate-dashboard command is added to the quality monitors command group (hidden while in preview):

```diff
@@ -41,6 +41,7 @@ func New() *cobra.Command {
     cmd.AddCommand(newGet())
     cmd.AddCommand(newGetRefresh())
     cmd.AddCommand(newListRefreshes())
+    cmd.AddCommand(newRegenerateDashboard())
     cmd.AddCommand(newRunRefresh())
     cmd.AddCommand(newUpdate())
 
@@ -503,6 +504,87 @@ func newListRefreshes() *cobra.Command {
     return cmd
 }
 
+// start regenerate-dashboard command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var regenerateDashboardOverrides []func(
+    *cobra.Command,
+    *catalog.RegenerateDashboardRequest,
+)
+
+func newRegenerateDashboard() *cobra.Command {
+    cmd := &cobra.Command{}
+
+    var regenerateDashboardReq catalog.RegenerateDashboardRequest
+    var regenerateDashboardJson flags.JsonFlag
+
+    // TODO: short flags
+    cmd.Flags().Var(&regenerateDashboardJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+    cmd.Flags().StringVar(&regenerateDashboardReq.WarehouseId, "warehouse-id", regenerateDashboardReq.WarehouseId, `Optional argument to specify the warehouse for dashboard regeneration.`)
+
+    cmd.Use = "regenerate-dashboard TABLE_NAME"
+    cmd.Short = `Regenerate a monitoring dashboard.`
+    cmd.Long = `Regenerate a monitoring dashboard.
+
+  Regenerates the monitoring dashboard for the specified table.
+
+  The caller must either: 1. be an owner of the table's parent catalog 2. have
+  **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+  parent schema 3. have the following permissions: - **USE_CATALOG** on the
+  table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+  owner of the table
+
+  The call must be made from the workspace where the monitor was created. The
+  dashboard will be regenerated in the assets directory that was specified when
+  the monitor was created.
+
+  Arguments:
+    TABLE_NAME: Full name of the table.`
+
+    // This command is being previewed; hide from help output.
+    cmd.Hidden = true
+
+    cmd.Annotations = make(map[string]string)
+
+    cmd.Args = func(cmd *cobra.Command, args []string) error {
+        check := root.ExactArgs(1)
+        return check(cmd, args)
+    }
+
+    cmd.PreRunE = root.MustWorkspaceClient
+    cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+        ctx := cmd.Context()
+        w := root.WorkspaceClient(ctx)
+
+        if cmd.Flags().Changed("json") {
+            err = regenerateDashboardJson.Unmarshal(&regenerateDashboardReq)
+            if err != nil {
+                return err
+            }
+        }
+        regenerateDashboardReq.TableName = args[0]
+
+        response, err := w.QualityMonitors.RegenerateDashboard(ctx, regenerateDashboardReq)
+        if err != nil {
+            return err
+        }
+        return cmdio.Render(ctx, response)
+    }
+
+    // Disable completions since they are not applicable.
+    // Can be overridden by manual implementation in `override.go`.
+    cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+    // Apply optional overrides to this command.
+    for _, fn := range regenerateDashboardOverrides {
+        fn(cmd, &regenerateDashboardReq)
+    }
+
+    return cmd
+}
+
 // start run-refresh command
 
 // Slice with functions to override default command behavior.
```
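The generated command is a thin wrapper over a single SDK call, w.QualityMonitors.RegenerateDashboard. With the Go SDK version this compare bumps to (v0.46.0, see go.mod below), the same call can be made directly. This is a sketch: the table name and warehouse ID are placeholders, and the response is printed generically because its type is not shown in this diff.

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/catalog"
)

func main() {
	ctx := context.Background()

	// Uses the SDK's default authentication chain (environment variables,
	// ~/.databrickscfg, etc.).
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		panic(err)
	}

	// Request field names are taken from the generated command above.
	resp, err := w.QualityMonitors.RegenerateDashboard(ctx, catalog.RegenerateDashboardRequest{
		TableName:   "main.sales.transactions", // hypothetical fully qualified table name
		WarehouseId: "abcdef1234567890",        // hypothetical; optional per the flag description
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", resp)
}
```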
go.mod (databricks-sdk-go bumped to v0.46.0, plus golang.org/x module updates):

```diff
@@ -5,7 +5,7 @@ go 1.22
 require (
     github.com/Masterminds/semver/v3 v3.3.0 // MIT
     github.com/briandowns/spinner v1.23.1 // Apache 2.0
-    github.com/databricks/databricks-sdk-go v0.45.0 // Apache 2.0
+    github.com/databricks/databricks-sdk-go v0.46.0 // Apache 2.0
     github.com/fatih/color v1.17.0 // MIT
     github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
     github.com/google/uuid v1.6.0 // BSD-3-Clause
@@ -23,10 +23,10 @@ require (
     github.com/stretchr/testify v1.9.0 // MIT
     golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
     golang.org/x/mod v0.20.0
-    golang.org/x/oauth2 v0.22.0
+    golang.org/x/oauth2 v0.23.0
     golang.org/x/sync v0.8.0
-    golang.org/x/term v0.23.0
-    golang.org/x/text v0.17.0
+    golang.org/x/term v0.24.0
+    golang.org/x/text v0.18.0
     gopkg.in/ini.v1 v1.67.0 // Apache 2.0
     gopkg.in/yaml.v3 v3.0.1
 )
@@ -61,7 +61,7 @@ require (
     go.opentelemetry.io/otel/trace v1.24.0 // indirect
     golang.org/x/crypto v0.24.0 // indirect
     golang.org/x/net v0.26.0 // indirect
-    golang.org/x/sys v0.23.0 // indirect
+    golang.org/x/sys v0.25.0 // indirect
     golang.org/x/time v0.5.0 // indirect
     google.golang.org/api v0.182.0 // indirect
     google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect
```
The corresponding go.sum updates:

```diff
@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
 github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.45.0 h1:wdx5Wm/ESrahdHeq62WrjLeGjV4r722LLanD8ahI0Mo=
-github.com/databricks/databricks-sdk-go v0.45.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
+github.com/databricks/databricks-sdk-go v0.46.0 h1:D0TxmtSVAOsdnfzH4OGtAmcq+8TyA7Z6fA6JEYhupeY=
+github.com/databricks/databricks-sdk-go v0.46.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -191,8 +191,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
 golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
 golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
+golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -208,14 +208,14 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM=
-golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
-golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
+golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
 golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
 golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
```