Compare commits

d1ec088d706b4ca8d4560fc5cbf9b8136caab6a9..5ac2d678fd14618aee5e089d05c2ed8c3ea71b88

No commits in common. "d1ec088d706b4ca8d4560fc5cbf9b8136caab6a9" and "5ac2d678fd14618aee5e089d05c2ed8c3ea71b88" have entirely different histories.

8 changed files with 128 additions and 173 deletions

View File

@@ -6,10 +6,10 @@ import (
     "testing"

     "github.com/databricks/cli/bundle"
+    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/bundle/config/mutator"
     "github.com/databricks/cli/bundle/config/resources"
-    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/libs/dbr"
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/databricks-sdk-go/service/catalog"

View File

@@ -2,9 +2,7 @@ package libraries

 import (
     "context"
-    "errors"
     "fmt"
-    "net/http"
     "path"
     "strings"
@@ -13,7 +11,6 @@ import (
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/cli/libs/dyn/dynvar"
     "github.com/databricks/cli/libs/filer"
-    "github.com/databricks/databricks-sdk-go/apierr"
 )

 func extractVolumeFromPath(artifactPath string) (string, string, string, error) {
@@ -56,6 +53,10 @@ func filerForVolume(ctx context.Context, b *bundle.Bundle) (filer.Filer, string, diag.Diagnostics) {
     artifactPath := b.Config.Workspace.ArtifactPath
     w := b.WorkspaceClient()

+    if !IsVolumesPath(artifactPath) {
+        return nil, "", diag.Errorf("expected artifact_path to start with /Volumes/, got %s", artifactPath)
+    }
+
     catalogName, schemaName, volumeName, err := extractVolumeFromPath(artifactPath)
     if err != nil {
         return nil, "", diag.Diagnostics{
@@ -75,33 +76,25 @@ func filerForVolume(ctx context.Context, b *bundle.Bundle) (filer.Filer, string, diag.Diagnostics) {
     // If the volume exists already, directly return the filer for the path to
     // upload the artifacts to.
     if err == nil {
-        uploadPath := path.Join(artifactPath, InternalDirName)
+        uploadPath := path.Join(artifactPath, ".internal")
         f, err := filer.NewFilesClient(w, uploadPath)
         return f, uploadPath, diag.FromErr(err)
     }

-    baseErr := diag.Diagnostic{
-        Severity:  diag.Error,
-        Summary:   fmt.Sprintf("failed to fetch metadata for %s: %s", volumePath, err),
-        Locations: b.Config.GetLocations("workspace.artifact_path"),
-        Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
-    }
+    diags := diag.Errorf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: %s", volumePath, err)

-    var aerr *apierr.APIError
-    if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound {
-        path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
-        if !ok {
-            return nil, "", diag.Diagnostics{baseErr}
-        }
-        baseErr.Detail = `You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.`
-        baseErr.Paths = append(baseErr.Paths, path)
-        baseErr.Locations = append(baseErr.Locations, locations...)
-    }
+    path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
+    if !ok {
+        return nil, "", diags
+    }

-    return nil, "", diag.Diagnostics{baseErr}
+    warning := diag.Diagnostic{
+        Severity:  diag.Warning,
+        Summary:   `You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.`,
+        Locations: locations,
+        Paths:     []dyn.Path{path},
+    }
+    return nil, "", diags.Append(warning)
 }

 func findVolumeInBundle(b *bundle.Bundle, catalogName, schemaName, volumeName string) (dyn.Path, []dyn.Location, bool) {
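
A note for readers of this compare view: filerForVolume keys off extractVolumeFromPath and findVolumeInBundle, whose signatures appear in the context lines above but whose bodies fall outside the hunks. The standalone sketch below only illustrates what a parser with the extractVolumeFromPath signature plausibly does (splitting /Volumes/<catalog>/<schema>/<volume>/... into its three components); it is an assumption for orientation, not code from either commit.

package main

import (
    "fmt"
    "path"
    "strings"
)

// extractVolumeFromPathSketch is a hypothetical stand-in for the function
// named in the hunk context above: it splits an artifact_path of the form
// /Volumes/<catalog>/<schema>/<volume>[/subpath] into catalog, schema and
// volume names, and errors out on anything else.
func extractVolumeFromPathSketch(artifactPath string) (string, string, string, error) {
    parts := strings.Split(strings.TrimPrefix(path.Clean(artifactPath), "/"), "/")
    if len(parts) < 4 || parts[0] != "Volumes" {
        return "", "", "", fmt.Errorf("expected /Volumes/<catalog>/<schema>/<volume>/..., got %s", artifactPath)
    }
    return parts[1], parts[2], parts[3], nil
}

func main() {
    catalog, schema, volume, err := extractVolumeFromPathSketch("/Volumes/main/my_schema/my_volume/abc")
    fmt.Println(catalog, schema, volume, err) // main my_schema my_volume <nil>
}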

View File

@@ -13,7 +13,6 @@ import (
     "github.com/databricks/cli/libs/diag"
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/cli/libs/filer"
-    "github.com/databricks/databricks-sdk-go/apierr"
     sdkconfig "github.com/databricks/databricks-sdk-go/config"
     "github.com/databricks/databricks-sdk-go/experimental/mocks"
     "github.com/databricks/databricks-sdk-go/service/catalog"
@@ -96,21 +95,14 @@ func TestFilerForVolumeNotInBundle(t *testing.T) {
         },
     }

-    bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
-
     m := mocks.NewMockWorkspaceClient(t)
     m.WorkspaceClient.Config = &sdkconfig.Config{}
     m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/doesnotexist").Return(fmt.Errorf("error from API"))
     b.SetWorkpaceClient(m.WorkspaceClient)

     _, _, diags := filerForVolume(context.Background(), b)
-    assert.Equal(t, diag.Diagnostics{
-        {
-            Severity:  diag.Error,
-            Summary:   "failed to fetch metadata for /Volumes/main/my_schema/doesnotexist: error from API",
-            Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
-            Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
-        }}, diags)
+    assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/doesnotexist that is configured in the artifact_path: error from API")
+    assert.Len(t, diags, 1)
 }
func TestFilerForVolumeInBundle(t *testing.T) { func TestFilerForVolumeInBundle(t *testing.T) {
@@ -134,30 +126,31 @@ func TestFilerForVolumeInBundle(t *testing.T) {
         },
     }

-    bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
-    bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{File: "volume.yml", Line: 1, Column: 2}})
+    bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{
+        {
+            File:   "volume.yml",
+            Line:   1,
+            Column: 2,
+        },
+    })

     m := mocks.NewMockWorkspaceClient(t)
     m.WorkspaceClient.Config = &sdkconfig.Config{}
-    m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(&apierr.APIError{
-        StatusCode: 404,
-        Message:    "error from API",
-    })
+    m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(fmt.Errorf("error from API"))
     b.SetWorkpaceClient(m.WorkspaceClient)

     _, _, diags := GetFilerForLibraries(context.Background(), b)
-    assert.Equal(t, diag.Diagnostics{
-        {
-            Severity:  diag.Error,
-            Summary:   "failed to fetch metadata for /Volumes/main/my_schema/my_volume: error from API",
-            Locations: []dyn.Location{{"config.yml", 1, 2}, {"volume.yml", 1, 2}},
-            Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path"), dyn.MustPathFromString("resources.volumes.foo")},
-            Detail: `You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.`,
-        },
-    }, diags)
+    assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/my_volume that is configured in the artifact_path: error from API")
+    assert.Contains(t, diags, diag.Diagnostic{
+        Severity: diag.Warning,
+        Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
+        Locations: []dyn.Location{{
+            File:   "volume.yml",
+            Line:   1,
+            Column: 2,
+        }},
+        Paths: []dyn.Path{dyn.MustPathFromString("resources.volumes.foo")},
+    })
 }

 func invalidVolumePaths() []string {
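
These tests now assert with assert.EqualError on diags.Error() plus assert.Contains and assert.Len on individual diagnostics, instead of comparing the whole diag.Diagnostics slice. Below is a minimal, self-contained illustration of the testify behaviour this relies on; the diagnostic struct is a stand-in for this example only, not the type from libs/diag. Contains only passes when some element of the slice is deeply equal to the expected value, so every field of the expected diagnostic must match.

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

// diagnostic is an illustrative stand-in type for this example only.
type diagnostic struct {
    Severity string
    Summary  string
}

func TestContainsMatchesWholeElements(t *testing.T) {
    diags := []diagnostic{
        {Severity: "error", Summary: "failed to fetch metadata for the UC volume"},
        {Severity: "warning", Summary: "volume not yet deployed"},
    }

    // Contains passes only because this element matches field-for-field.
    assert.Contains(t, diags, diagnostic{Severity: "warning", Summary: "volume not yet deployed"})
    assert.Len(t, diags, 2)
}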

View File

@@ -8,12 +8,8 @@ import (
     "github.com/databricks/cli/libs/filer"
 )

-// We upload artifacts to the workspace in a directory named ".internal" to have
-// a well defined location for artifacts that have been uploaded by the DABs.
-const InternalDirName = ".internal"
-
 func filerForWorkspace(b *bundle.Bundle) (filer.Filer, string, diag.Diagnostics) {
-    uploadPath := path.Join(b.Config.Workspace.ArtifactPath, InternalDirName)
+    uploadPath := path.Join(b.Config.Workspace.ArtifactPath, ".internal")
     f, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), uploadPath)
     return f, uploadPath, diag.FromErr(err)
 }
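
With the InternalDirName constant gone, both the workspace and the volume code paths join the ".internal" directory name onto the configured artifact_path directly. A small standalone check of that path construction follows; the artifact_path value is made up for illustration.

package main

import (
    "fmt"
    "path"
)

func main() {
    // Same upload-path construction as filerForWorkspace above, with an
    // illustrative artifact_path.
    artifactPath := "/Workspace/Users/someone@example.com/artifacts"
    fmt.Println(path.Join(artifactPath, ".internal"))
    // Output: /Workspace/Users/someone@example.com/artifacts/.internal
}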

View File

@@ -1,27 +0,0 @@
-package libraries
-
-import (
-    "path"
-    "testing"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/libs/filer"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
-)
-
-func TestFilerForWorkspace(t *testing.T) {
-    b := &bundle.Bundle{
-        Config: config.Root{
-            Workspace: config.Workspace{
-                ArtifactPath: "/Workspace/Users/shreyas.goenka@databricks.com/a/b/c",
-            },
-        },
-    }
-
-    client, uploadPath, diags := filerForWorkspace(b)
-    require.NoError(t, diags.Error())
-    assert.Equal(t, path.Join("/Workspace/Users/shreyas.goenka@databricks.com/a/b/c/.internal"), uploadPath)
-    assert.IsType(t, &filer.WorkspaceFilesClient{}, client)
-}

View File

@@ -9,15 +9,17 @@ import (
     "testing"

     "github.com/databricks/cli/bundle"
+    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/bundle/config/resources"
     "github.com/databricks/cli/bundle/libraries"
     "github.com/databricks/cli/internal"
     "github.com/databricks/cli/internal/acc"
+    "github.com/databricks/cli/libs/diag"
+    "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/databricks-sdk-go/service/catalog"
     "github.com/databricks/databricks-sdk-go/service/compute"
     "github.com/databricks/databricks-sdk-go/service/jobs"
-    "github.com/google/uuid"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
@@ -230,7 +232,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
     )
 }

-func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
+func TestAccUploadArtifactFileToInvalidVolume(t *testing.T) {
     ctx, wt := acc.UcWorkspaceTest(t)
     w := wt.W
@@ -248,65 +250,93 @@
         require.NoError(t, err)
     })

-    bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
-        "unique_id":   uuid.New().String(),
-        "schema_name": schemaName,
-        "volume_name": "doesnotexist",
-    })
-    require.NoError(t, err)
-
-    t.Setenv("BUNDLE_ROOT", bundleRoot)
-    stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
-
-    assert.Error(t, err)
-    assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/doesnotexist: Not Found
-  at workspace.artifact_path
-  in databricks.yml:6:18
-
-`, schemaName), stdout.String())
-    assert.Equal(t, "", stderr.String())
-}
-
-func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
-    ctx, wt := acc.UcWorkspaceTest(t)
-    w := wt.W
-
-    schemaName := internal.RandomName("schema-")
-
-    _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
-        CatalogName: "main",
-        Comment:     "test schema",
-        Name:        schemaName,
-    })
-    require.NoError(t, err)
-
-    t.Cleanup(func() {
-        err = w.Schemas.DeleteByFullName(ctx, "main."+schemaName)
-        require.NoError(t, err)
-    })
-
-    bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
-        "unique_id":   uuid.New().String(),
-        "schema_name": schemaName,
-        "volume_name": "my_volume",
-    })
-    require.NoError(t, err)
-
-    t.Setenv("BUNDLE_ROOT", bundleRoot)
-    stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
-
-    assert.Error(t, err)
-    assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/my_volume: Not Found
-  at workspace.artifact_path
-     resources.volumes.foo
-  in databricks.yml:6:18
-     databricks.yml:11:7
-
-You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.
-`, schemaName), stdout.String())
-    assert.Equal(t, "", stderr.String())
+    t.Run("volume not in DAB", func(t *testing.T) {
+        volumePath := fmt.Sprintf("/Volumes/main/%s/doesnotexist", schemaName)
+        dir := t.TempDir()
+
+        b := &bundle.Bundle{
+            BundleRootPath: dir,
+            SyncRootPath:   dir,
+            Config: config.Root{
+                Bundle: config.Bundle{
+                    Target: "whatever",
+                },
+                Workspace: config.Workspace{
+                    ArtifactPath: volumePath,
+                },
+                Resources: config.Resources{
+                    Volumes: map[string]*resources.Volume{
+                        "foo": {
+                            CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
+                                CatalogName: "main",
+                                Name:        "my_volume",
+                                VolumeType:  "MANAGED",
+                                SchemaName:  schemaName,
+                            },
+                        },
+                    },
+                },
+            },
+        }
+
+        diags := bundle.Apply(ctx, b, libraries.Upload())
+        assert.ErrorContains(t, diags.Error(), fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path:", volumePath))
+    })
+
+    t.Run("volume in DAB config", func(t *testing.T) {
+        volumePath := fmt.Sprintf("/Volumes/main/%s/my_volume", schemaName)
+        dir := t.TempDir()
+
+        b := &bundle.Bundle{
+            BundleRootPath: dir,
+            SyncRootPath:   dir,
+            Config: config.Root{
+                Bundle: config.Bundle{
+                    Target: "whatever",
+                },
+                Workspace: config.Workspace{
+                    ArtifactPath: volumePath,
+                },
+                Resources: config.Resources{
+                    Volumes: map[string]*resources.Volume{
+                        "foo": {
+                            CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
+                                CatalogName: "main",
+                                Name:        "my_volume",
+                                VolumeType:  "MANAGED",
+                                SchemaName:  schemaName,
+                            },
+                        },
+                    },
+                },
+            },
+        }
+
+        // set location of volume definition in config.
+        bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{
+            File:   filepath.Join(dir, "databricks.yml"),
+            Line:   1,
+            Column: 2,
+        }})
+
+        diags := bundle.Apply(ctx, b, libraries.Upload())
+        assert.Contains(t, diags, diag.Diagnostic{
+            Severity: diag.Error,
+            Summary:  fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: Not Found", volumePath),
+        })
+        assert.Contains(t, diags, diag.Diagnostic{
+            Severity: diag.Warning,
+            Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
+            Locations: []dyn.Location{
+                {
+                    File:   filepath.Join(dir, "databricks.yml"),
+                    Line:   1,
+                    Column: 2,
+                },
+            },
+            Paths: []dyn.Path{
+                dyn.MustPathFromString("resources.volumes.foo"),
+            },
+        })
+    })
 }

View File

@@ -1,16 +0,0 @@
-{
-  "properties": {
-    "unique_id": {
-      "type": "string",
-      "description": "Unique ID for job name"
-    },
-    "schema_name": {
-      "type": "string",
-      "description": "schema name to use in the artifact_path"
-    },
-    "volume_name": {
-      "type": "string",
-      "description": "volume name to use in the artifact_path"
-    }
-  }
-}

View File

@@ -1,14 +0,0 @@
-bundle:
-  name: artifact_path_with_volume
-
-workspace:
-  root_path: "~/.bundle/{{.unique_id}}"
-  artifact_path: /Volumes/main/{{.schema_name}}/{{.volume_name}}
-
-resources:
-  volumes:
-    foo:
-      catalog_name: main
-      name: my_volume
-      schema_name: {{.schema_name}}
-      volume_type: MANAGED