mirror of https://github.com/databricks/cli.git
address comments
parent 5ac2d678fd
commit 9493795d88
@@ -2,7 +2,9 @@ package libraries
 
 import (
 	"context"
+	"errors"
 	"fmt"
+	"net/http"
 	"path"
 	"strings"
@@ -11,6 +13,7 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/dyn/dynvar"
 	"github.com/databricks/cli/libs/filer"
+	"github.com/databricks/databricks-sdk-go/apierr"
 )
 
 func extractVolumeFromPath(artifactPath string) (string, string, string, error) {
@@ -81,20 +84,27 @@ func filerForVolume(ctx context.Context, b *bundle.Bundle) (filer.Filer, string,
 		return f, uploadPath, diag.FromErr(err)
 	}
 
-	diags := diag.Errorf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: %s", volumePath, err)
-
-	path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
-	if !ok {
-		return nil, "", diags
+	baseErr := diag.Diagnostic{
+		Severity:  diag.Error,
+		Summary:   fmt.Sprintf("failed to fetch metadata for %s: %s", volumePath, err),
+		Locations: b.Config.GetLocations("workspace.artifact_path"),
+		Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
 	}
 
-	warning := diag.Diagnostic{
-		Severity:  diag.Warning,
-		Summary:   `You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.`,
-		Locations: locations,
-		Paths:     []dyn.Path{path},
+	var aerr *apierr.APIError
+	if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound {
+		path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
+		if !ok {
+			return nil, "", diag.Diagnostics{baseErr}
+		}
+		baseErr.Detail = `You are using a UC volume in your artifact_path that is managed by
+this bundle but which has not been deployed yet. Please deploy the UC volume in
+a separate bundle deploy before using it in the artifact_path.`
+		baseErr.Paths = append(baseErr.Paths, path)
+		baseErr.Locations = append(baseErr.Locations, locations...)
 	}
-	return nil, "", diags.Append(warning)
+
+	return nil, "", diag.Diagnostics{baseErr}
 }
 
 func findVolumeInBundle(b *bundle.Bundle, catalogName, schemaName, volumeName string) (dyn.Path, []dyn.Location, bool) {
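For readers skimming the diff: the new code path hinges on detecting an HTTP 404 from the Files API via errors.As. Below is a minimal, self-contained sketch of that check, assuming only the databricks-sdk-go apierr.APIError type used in the hunk above; the isNotFound helper and the main function are illustrative and not part of the CLI.

package main

import (
	"errors"
	"fmt"
	"net/http"

	"github.com/databricks/databricks-sdk-go/apierr"
)

// isNotFound mirrors the check in the hunk above: it reports whether err
// (anywhere in its wrap chain) is a Databricks API error with status 404.
func isNotFound(err error) bool {
	var aerr *apierr.APIError
	return errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound
}

func main() {
	// Simulated error, wrapped the way a caller might wrap a Files API failure.
	apiErr := &apierr.APIError{StatusCode: 404, Message: "Not Found"}
	err := fmt.Errorf("failed to fetch metadata: %w", apiErr)
	fmt.Println(isNotFound(err)) // true
}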
@@ -13,6 +13,7 @@ import (
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/filer"
+	"github.com/databricks/databricks-sdk-go/apierr"
 	sdkconfig "github.com/databricks/databricks-sdk-go/config"
 	"github.com/databricks/databricks-sdk-go/experimental/mocks"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
@@ -95,14 +96,21 @@ func TestFilerForVolumeNotInBundle(t *testing.T) {
 		},
 	}
 
+	bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
+
 	m := mocks.NewMockWorkspaceClient(t)
 	m.WorkspaceClient.Config = &sdkconfig.Config{}
 	m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/doesnotexist").Return(fmt.Errorf("error from API"))
 	b.SetWorkpaceClient(m.WorkspaceClient)
 
 	_, _, diags := filerForVolume(context.Background(), b)
-	assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/doesnotexist that is configured in the artifact_path: error from API")
-	assert.Len(t, diags, 1)
+	assert.Equal(t, diag.Diagnostics{
+		{
+			Severity:  diag.Error,
+			Summary:   "failed to fetch metadata for /Volumes/main/my_schema/doesnotexist: error from API",
+			Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
+			Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
+		}}, diags)
 }
 
 func TestFilerForVolumeInBundle(t *testing.T) {
@@ -126,31 +134,29 @@ func TestFilerForVolumeInBundle(t *testing.T) {
 		},
 	}
 
-	bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{
-		{
-			File:   "volume.yml",
-			Line:   1,
-			Column: 2,
-		},
-	})
+	bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
+	bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{File: "volume.yml", Line: 1, Column: 2}})
 
 	m := mocks.NewMockWorkspaceClient(t)
 	m.WorkspaceClient.Config = &sdkconfig.Config{}
-	m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(fmt.Errorf("error from API"))
+	m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(&apierr.APIError{
+		StatusCode: 404,
+		Message:    "error from API",
+	})
 	b.SetWorkpaceClient(m.WorkspaceClient)
 
 	_, _, diags := GetFilerForLibraries(context.Background(), b)
-	assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/my_volume that is configured in the artifact_path: error from API")
-	assert.Contains(t, diags, diag.Diagnostic{
-		Severity: diag.Warning,
-		Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
-		Locations: []dyn.Location{{
-			File:   "volume.yml",
-			Line:   1,
-			Column: 2,
-		}},
-		Paths: []dyn.Path{dyn.MustPathFromString("resources.volumes.foo")},
-	})
+	assert.Equal(t, diag.Diagnostics{
+		{
+			Severity:  diag.Error,
+			Summary:   "failed to fetch metadata for /Volumes/main/my_schema/my_volume: error from API",
+			Locations: []dyn.Location{{"config.yml", 1, 2}, {"volume.yml", 1, 2}},
+			Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path"), dyn.MustPathFromString("resources.volumes.foo")},
+			Detail: `You are using a UC volume in your artifact_path that is managed by
+this bundle but which has not been deployed yet. Please deploy the UC volume in
+a separate bundle deploy before using it in the artifact_path.`,
+		},
+	}, diags)
 }
 
 func invalidVolumePaths() []string {
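A side note on the assertion above, which mixes keyed and positional struct literals for dyn.Location: the two forms describe the same value, given the field order File, Line, Column implied by the positional literal in the test. A small illustrative sketch, not CLI code:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Keyed and positional literals of dyn.Location compare equal, assuming
	// the File, Line, Column field order used positionally in the test above.
	keyed := dyn.Location{File: "config.yml", Line: 1, Column: 2}
	positional := dyn.Location{"config.yml", 1, 2}
	fmt.Println(keyed == positional) // true
}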
@@ -9,17 +9,15 @@ import (
 	"testing"
 
 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/bundletest"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/resources"
 	"github.com/databricks/cli/bundle/libraries"
 	"github.com/databricks/cli/internal"
 	"github.com/databricks/cli/internal/acc"
-	"github.com/databricks/cli/libs/diag"
-	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/google/uuid"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -232,7 +230,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
 	)
 }
 
-func TestAccUploadArtifactFileToInvalidVolume(t *testing.T) {
+func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
 	ctx, wt := acc.UcWorkspaceTest(t)
 	w := wt.W
 
@@ -250,93 +248,64 @@ func TestAccUploadArtifactFileToInvalidVolume(t *testing.T) {
 		require.NoError(t, err)
 	})
 
-	t.Run("volume not in DAB", func(t *testing.T) {
-		volumePath := fmt.Sprintf("/Volumes/main/%s/doesnotexist", schemaName)
-		dir := t.TempDir()
-
-		b := &bundle.Bundle{
-			BundleRootPath: dir,
-			SyncRootPath:   dir,
-			Config: config.Root{
-				Bundle: config.Bundle{
-					Target: "whatever",
-				},
-				Workspace: config.Workspace{
-					ArtifactPath: volumePath,
-				},
-				Resources: config.Resources{
-					Volumes: map[string]*resources.Volume{
-						"foo": {
-							CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
-								CatalogName: "main",
-								Name:        "my_volume",
-								VolumeType:  "MANAGED",
-								SchemaName:  schemaName,
-							},
-						},
-					},
-				},
-			},
-		}
-
-		diags := bundle.Apply(ctx, b, libraries.Upload())
-		assert.ErrorContains(t, diags.Error(), fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path:", volumePath))
-	})
+	bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
+		"unique_id":   uuid.New().String(),
+		"schema_name": schemaName,
+		"volume_name": "doesnotexist",
+	})
+	require.NoError(t, err)
 
-	t.Run("volume in DAB config", func(t *testing.T) {
-		volumePath := fmt.Sprintf("/Volumes/main/%s/my_volume", schemaName)
-		dir := t.TempDir()
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
 
-		b := &bundle.Bundle{
-			BundleRootPath: dir,
-			SyncRootPath:   dir,
-			Config: config.Root{
-				Bundle: config.Bundle{
-					Target: "whatever",
-				},
-				Workspace: config.Workspace{
-					ArtifactPath: volumePath,
-				},
-				Resources: config.Resources{
-					Volumes: map[string]*resources.Volume{
-						"foo": {
-							CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
-								CatalogName: "main",
-								Name:        "my_volume",
-								VolumeType:  "MANAGED",
-								SchemaName:  schemaName,
-							},
-						},
-					},
-				},
-			},
-		}
+	assert.Error(t, err)
+	assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/doesnotexist: Not Found
+  at workspace.artifact_path
+  in databricks.yml:6:18
+
-		// set location of volume definition in config.
-		bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{
-			File:   filepath.Join(dir, "databricks.yml"),
-			Line:   1,
-			Column: 2,
-		}})
-
-		diags := bundle.Apply(ctx, b, libraries.Upload())
-		assert.Contains(t, diags, diag.Diagnostic{
-			Severity: diag.Error,
-			Summary:  fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: Not Found", volumePath),
-		})
-		assert.Contains(t, diags, diag.Diagnostic{
-			Severity: diag.Warning,
-			Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
-			Locations: []dyn.Location{
-				{
-					File:   filepath.Join(dir, "databricks.yml"),
-					Line:   1,
-					Column: 2,
-				},
-			},
-			Paths: []dyn.Path{
-				dyn.MustPathFromString("resources.volumes.foo"),
-			},
-		})
-	})
+`, schemaName), stdout.String())
+	assert.Equal(t, "", stderr.String())
 }
 
+func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
+	ctx, wt := acc.UcWorkspaceTest(t)
+	w := wt.W
+
+	schemaName := internal.RandomName("schema-")
+
+	_, err := w.Schemas.Create(ctx, catalog.CreateSchema{
+		CatalogName: "main",
+		Comment:     "test schema",
+		Name:        schemaName,
+	})
+	require.NoError(t, err)
+
+	t.Cleanup(func() {
+		err = w.Schemas.DeleteByFullName(ctx, "main."+schemaName)
+		require.NoError(t, err)
+	})
+
+	bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
+		"unique_id":   uuid.New().String(),
+		"schema_name": schemaName,
+		"volume_name": "my_volume",
+	})
+	require.NoError(t, err)
+
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
+
+	assert.Error(t, err)
+	assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/my_volume: Not Found
+  at workspace.artifact_path
+     resources.volumes.foo
+  in databricks.yml:6:18
+     databricks.yml:11:7
+
+You are using a UC volume in your artifact_path that is managed by
+this bundle but which has not been deployed yet. Please deploy the UC volume in
+a separate bundle deploy before using it in the artifact_path.
+
+`, schemaName), stdout.String())
+	assert.Equal(t, "", stderr.String())
+}
@@ -0,0 +1,16 @@
+{
+    "properties": {
+        "unique_id": {
+            "type": "string",
+            "description": "Unique ID for job name"
+        },
+        "schema_name": {
+            "type": "string",
+            "description": "schema name to use in the artifact_path"
+        },
+        "volume_name": {
+            "type": "string",
+            "description": "volume name to use in the artifact_path"
+        }
+    }
+}
@@ -0,0 +1,14 @@
+bundle:
+  name: artifact_path_with_volume
+
+workspace:
+  root_path: "~/.bundle/{{.unique_id}}"
+  artifact_path: /Volumes/main/{{.schema_name}}/{{.volume_name}}
+
+resources:
+  volumes:
+    foo:
+      catalog_name: main
+      name: my_volume
+      schema_name: {{.schema_name}}
+      volume_type: MANAGED
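For illustration only: substituting hypothetical template inputs (schema_name "my_schema_abc", volume_name "my_volume", and a made-up unique_id) into the template above yields roughly the following bundle configuration.

bundle:
  name: artifact_path_with_volume

workspace:
  root_path: "~/.bundle/3f2c1d9e"
  artifact_path: /Volumes/main/my_schema_abc/my_volume

resources:
  volumes:
    foo:
      catalog_name: main
      name: my_volume
      schema_name: my_schema_abc
      volume_type: MANAGED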