Compare commits


No commits in common. "d1ec088d706b4ca8d4560fc5cbf9b8136caab6a9" and "5ac2d678fd14618aee5e089d05c2ed8c3ea71b88" have entirely different histories.

8 changed files with 128 additions and 173 deletions

View File

@@ -6,10 +6,10 @@ import (
     "testing"

     "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/bundle/config/mutator"
     "github.com/databricks/cli/bundle/config/resources"
+    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/libs/dbr"
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/databricks-sdk-go/service/catalog"

View File

@@ -2,9 +2,7 @@ package libraries
 import (
     "context"
-    "errors"
     "fmt"
-    "net/http"
     "path"
     "strings"
@@ -13,7 +11,6 @@ import (
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/cli/libs/dyn/dynvar"
     "github.com/databricks/cli/libs/filer"
-    "github.com/databricks/databricks-sdk-go/apierr"
 )

 func extractVolumeFromPath(artifactPath string) (string, string, string, error) {
@@ -56,6 +53,10 @@ func filerForVolume(ctx context.Context, b *bundle.Bundle) (filer.Filer, string,
     artifactPath := b.Config.Workspace.ArtifactPath
     w := b.WorkspaceClient()

+    if !IsVolumesPath(artifactPath) {
+        return nil, "", diag.Errorf("expected artifact_path to start with /Volumes/, got %s", artifactPath)
+    }
+
     catalogName, schemaName, volumeName, err := extractVolumeFromPath(artifactPath)
     if err != nil {
         return nil, "", diag.Diagnostics{
@@ -75,33 +76,25 @@ func filerForVolume(ctx context.Context, b *bundle.Bundle) (filer.Filer, string,
     // If the volume exists already, directly return the filer for the path to
     // upload the artifacts to.
     if err == nil {
-        uploadPath := path.Join(artifactPath, InternalDirName)
+        uploadPath := path.Join(artifactPath, ".internal")
         f, err := filer.NewFilesClient(w, uploadPath)
         return f, uploadPath, diag.FromErr(err)
     }

-    baseErr := diag.Diagnostic{
-        Severity:  diag.Error,
-        Summary:   fmt.Sprintf("failed to fetch metadata for %s: %s", volumePath, err),
-        Locations: b.Config.GetLocations("workspace.artifact_path"),
-        Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
-    }
-
-    var aerr *apierr.APIError
-    if errors.As(err, &aerr) && aerr.StatusCode == http.StatusNotFound {
-        path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
-        if !ok {
-            return nil, "", diag.Diagnostics{baseErr}
-        }
-        baseErr.Detail = `You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.`
-        baseErr.Paths = append(baseErr.Paths, path)
-        baseErr.Locations = append(baseErr.Locations, locations...)
-    }
-
-    return nil, "", diag.Diagnostics{baseErr}
+    diags := diag.Errorf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: %s", volumePath, err)
+
+    path, locations, ok := findVolumeInBundle(b, catalogName, schemaName, volumeName)
+    if !ok {
+        return nil, "", diags
+    }
+
+    warning := diag.Diagnostic{
+        Severity:  diag.Warning,
+        Summary:   `You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.`,
+        Locations: locations,
+        Paths:     []dyn.Path{path},
+    }
+    return nil, "", diags.Append(warning)
 }

 func findVolumeInBundle(b *bundle.Bundle, catalogName, schemaName, volumeName string) (dyn.Path, []dyn.Location, bool) {

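For readers skimming the right-hand version of this hunk: the flow always records the metadata-fetch failure as an error, and appends a warning pointing at the volume definition only when findVolumeInBundle locates one in the bundle. Below is a minimal Go sketch of that pattern, built only from the diag and dyn calls visible in the hunk above; the helper name and its parameters are illustrative, not part of either commit.

package libraries

import (
    "github.com/databricks/cli/libs/diag"
    "github.com/databricks/cli/libs/dyn"
)

// volumeNotFoundDiagnostics is a hypothetical helper mirroring the flow above:
// the fetch failure is always reported as an error, and a warning that points at
// the in-bundle volume definition is appended only when one was found.
func volumeNotFoundDiagnostics(volumePath string, err error, found bool, p dyn.Path, locs []dyn.Location) diag.Diagnostics {
    diags := diag.Errorf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: %s", volumePath, err)
    if !found {
        return diags
    }
    return diags.Append(diag.Diagnostic{
        Severity:  diag.Warning,
        Summary:   "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet.",
        Locations: locs,
        Paths:     []dyn.Path{p},
    })
}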
View File

@@ -13,7 +13,6 @@ import (
     "github.com/databricks/cli/libs/diag"
     "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/cli/libs/filer"
-    "github.com/databricks/databricks-sdk-go/apierr"
     sdkconfig "github.com/databricks/databricks-sdk-go/config"
     "github.com/databricks/databricks-sdk-go/experimental/mocks"
     "github.com/databricks/databricks-sdk-go/service/catalog"
@@ -96,21 +95,14 @@ func TestFilerForVolumeNotInBundle(t *testing.T) {
         },
     }

-    bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
-
     m := mocks.NewMockWorkspaceClient(t)
     m.WorkspaceClient.Config = &sdkconfig.Config{}
     m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/doesnotexist").Return(fmt.Errorf("error from API"))
     b.SetWorkpaceClient(m.WorkspaceClient)

     _, _, diags := filerForVolume(context.Background(), b)
-    assert.Equal(t, diag.Diagnostics{
-        {
-            Severity:  diag.Error,
-            Summary:   "failed to fetch metadata for /Volumes/main/my_schema/doesnotexist: error from API",
-            Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
-            Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
-        }}, diags)
+    assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/doesnotexist that is configured in the artifact_path: error from API")
+    assert.Len(t, diags, 1)
 }

 func TestFilerForVolumeInBundle(t *testing.T) {
@@ -134,30 +126,31 @@ func TestFilerForVolumeInBundle(t *testing.T) {
         },
     }

-    bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
-    bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{File: "volume.yml", Line: 1, Column: 2}})
+    bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{
+        {
+            File:   "volume.yml",
+            Line:   1,
+            Column: 2,
+        },
+    })

     m := mocks.NewMockWorkspaceClient(t)
     m.WorkspaceClient.Config = &sdkconfig.Config{}
-    m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(&apierr.APIError{
-        StatusCode: 404,
-        Message:    "error from API",
-    })
+    m.GetMockFilesAPI().EXPECT().GetDirectoryMetadataByDirectoryPath(mock.Anything, "/Volumes/main/my_schema/my_volume").Return(fmt.Errorf("error from API"))
     b.SetWorkpaceClient(m.WorkspaceClient)

     _, _, diags := GetFilerForLibraries(context.Background(), b)
-    assert.Equal(t, diag.Diagnostics{
-        {
-            Severity:  diag.Error,
-            Summary:   "failed to fetch metadata for /Volumes/main/my_schema/my_volume: error from API",
-            Locations: []dyn.Location{{"config.yml", 1, 2}, {"volume.yml", 1, 2}},
-            Paths:     []dyn.Path{dyn.MustPathFromString("workspace.artifact_path"), dyn.MustPathFromString("resources.volumes.foo")},
-            Detail: `You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.`,
-        },
-    }, diags)
+    assert.EqualError(t, diags.Error(), "failed to fetch metadata for the UC volume /Volumes/main/my_schema/my_volume that is configured in the artifact_path: error from API")
+    assert.Contains(t, diags, diag.Diagnostic{
+        Severity: diag.Warning,
+        Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
+        Locations: []dyn.Location{{
+            File:   "volume.yml",
+            Line:   1,
+            Column: 2,
+        }},
+        Paths: []dyn.Path{dyn.MustPathFromString("resources.volumes.foo")},
+    })
 }

 func invalidVolumePaths() []string {

View File

@@ -8,12 +8,8 @@ import (
     "github.com/databricks/cli/libs/filer"
 )

-// We upload artifacts to the workspace in a directory named ".internal" to have
-// a well defined location for artifacts that have been uploaded by the DABs.
-const InternalDirName = ".internal"
-
 func filerForWorkspace(b *bundle.Bundle) (filer.Filer, string, diag.Diagnostics) {
-    uploadPath := path.Join(b.Config.Workspace.ArtifactPath, InternalDirName)
+    uploadPath := path.Join(b.Config.Workspace.ArtifactPath, ".internal")
     f, err := filer.NewWorkspaceFilesClient(b.WorkspaceClient(), uploadPath)
     return f, uploadPath, diag.FromErr(err)
 }

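For context, dropping the InternalDirName constant only changes where the ".internal" segment is spelled; the resulting upload path is identical. A small self-contained Go sketch, using an illustrative artifact path that is not taken from either commit:

package main

import (
    "fmt"
    "path"
)

func main() {
    // The upload destination is the configured artifact_path plus a ".internal" suffix,
    // whether that suffix comes from a named constant or a string literal.
    artifactPath := "/Workspace/Users/someone@example.com/a/b/c" // illustrative value
    fmt.Println(path.Join(artifactPath, ".internal"))
    // Output: /Workspace/Users/someone@example.com/a/b/c/.internal
}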
View File

@@ -1,27 +0,0 @@
-package libraries
-
-import (
-    "path"
-    "testing"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/libs/filer"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
-)
-
-func TestFilerForWorkspace(t *testing.T) {
-    b := &bundle.Bundle{
-        Config: config.Root{
-            Workspace: config.Workspace{
-                ArtifactPath: "/Workspace/Users/shreyas.goenka@databricks.com/a/b/c",
-            },
-        },
-    }
-
-    client, uploadPath, diags := filerForWorkspace(b)
-    require.NoError(t, diags.Error())
-    assert.Equal(t, path.Join("/Workspace/Users/shreyas.goenka@databricks.com/a/b/c/.internal"), uploadPath)
-    assert.IsType(t, &filer.WorkspaceFilesClient{}, client)
-}

View File

@@ -9,15 +9,17 @@ import (
     "testing"

     "github.com/databricks/cli/bundle"
+    "github.com/databricks/cli/bundle/bundletest"
     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/bundle/config/resources"
     "github.com/databricks/cli/bundle/libraries"
     "github.com/databricks/cli/internal"
     "github.com/databricks/cli/internal/acc"
+    "github.com/databricks/cli/libs/diag"
+    "github.com/databricks/cli/libs/dyn"
     "github.com/databricks/databricks-sdk-go/service/catalog"
     "github.com/databricks/databricks-sdk-go/service/compute"
     "github.com/databricks/databricks-sdk-go/service/jobs"
-    "github.com/google/uuid"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
@@ -230,7 +232,7 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
     )
 }

-func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
+func TestAccUploadArtifactFileToInvalidVolume(t *testing.T) {
     ctx, wt := acc.UcWorkspaceTest(t)
     w := wt.W
@@ -248,65 +250,93 @@
         require.NoError(t, err)
     })

-    bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
-        "unique_id":   uuid.New().String(),
-        "schema_name": schemaName,
-        "volume_name": "doesnotexist",
-    })
-    require.NoError(t, err)
-
-    t.Setenv("BUNDLE_ROOT", bundleRoot)
-    stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
-
-    assert.Error(t, err)
-    assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/doesnotexist: Not Found
-  at workspace.artifact_path
-  in databricks.yml:6:18
-
-`, schemaName), stdout.String())
-    assert.Equal(t, "", stderr.String())
-}
-
-func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
-    ctx, wt := acc.UcWorkspaceTest(t)
-    w := wt.W
-
-    schemaName := internal.RandomName("schema-")
-    _, err := w.Schemas.Create(ctx, catalog.CreateSchema{
-        CatalogName: "main",
-        Comment:     "test schema",
-        Name:        schemaName,
-    })
-    require.NoError(t, err)
-
-    t.Cleanup(func() {
-        err = w.Schemas.DeleteByFullName(ctx, "main."+schemaName)
-        require.NoError(t, err)
-    })
-
-    bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
-        "unique_id":   uuid.New().String(),
-        "schema_name": schemaName,
-        "volume_name": "my_volume",
-    })
-    require.NoError(t, err)
-
-    t.Setenv("BUNDLE_ROOT", bundleRoot)
-    stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy")
-
-    assert.Error(t, err)
-    assert.Equal(t, fmt.Sprintf(`Error: failed to fetch metadata for /Volumes/main/%s/my_volume: Not Found
-  at workspace.artifact_path
-     resources.volumes.foo
-  in databricks.yml:6:18
-     databricks.yml:11:7
-
-You are using a UC volume in your artifact_path that is managed by
-this bundle but which has not been deployed yet. Please first deploy
-the UC volume using 'bundle deploy' and then switch over to using it in
-the artifact_path.
-
-`, schemaName), stdout.String())
-    assert.Equal(t, "", stderr.String())
+    t.Run("volume not in DAB", func(t *testing.T) {
+        volumePath := fmt.Sprintf("/Volumes/main/%s/doesnotexist", schemaName)
+        dir := t.TempDir()
+
+        b := &bundle.Bundle{
+            BundleRootPath: dir,
+            SyncRootPath:   dir,
+            Config: config.Root{
+                Bundle: config.Bundle{
+                    Target: "whatever",
+                },
+                Workspace: config.Workspace{
+                    ArtifactPath: volumePath,
+                },
+                Resources: config.Resources{
+                    Volumes: map[string]*resources.Volume{
+                        "foo": {
+                            CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
+                                CatalogName: "main",
+                                Name:        "my_volume",
+                                VolumeType:  "MANAGED",
+                                SchemaName:  schemaName,
+                            },
+                        },
+                    },
+                },
+            },
+        }
+
+        diags := bundle.Apply(ctx, b, libraries.Upload())
+        assert.ErrorContains(t, diags.Error(), fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path:", volumePath))
+    })
+
+    t.Run("volume in DAB config", func(t *testing.T) {
+        volumePath := fmt.Sprintf("/Volumes/main/%s/my_volume", schemaName)
+        dir := t.TempDir()
+
+        b := &bundle.Bundle{
+            BundleRootPath: dir,
+            SyncRootPath:   dir,
+            Config: config.Root{
+                Bundle: config.Bundle{
+                    Target: "whatever",
+                },
+                Workspace: config.Workspace{
+                    ArtifactPath: volumePath,
+                },
+                Resources: config.Resources{
+                    Volumes: map[string]*resources.Volume{
+                        "foo": {
+                            CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
+                                CatalogName: "main",
+                                Name:        "my_volume",
+                                VolumeType:  "MANAGED",
+                                SchemaName:  schemaName,
+                            },
+                        },
+                    },
+                },
+            },
+        }
+
+        // set location of volume definition in config.
+        bundletest.SetLocation(b, "resources.volumes.foo", []dyn.Location{{
+            File:   filepath.Join(dir, "databricks.yml"),
+            Line:   1,
+            Column: 2,
+        }})
+
+        diags := bundle.Apply(ctx, b, libraries.Upload())
+        assert.Contains(t, diags, diag.Diagnostic{
+            Severity: diag.Error,
+            Summary:  fmt.Sprintf("failed to fetch metadata for the UC volume %s that is configured in the artifact_path: Not Found", volumePath),
+        })
+        assert.Contains(t, diags, diag.Diagnostic{
+            Severity: diag.Warning,
+            Summary:  "You might be using a UC volume in your artifact_path that is managed by this bundle but which has not been deployed yet. Please deploy the UC volume in a separate bundle deploy before using it in the artifact_path.",
+            Locations: []dyn.Location{
+                {
+                    File:   filepath.Join(dir, "databricks.yml"),
+                    Line:   1,
+                    Column: 2,
+                },
+            },
+            Paths: []dyn.Path{
+                dyn.MustPathFromString("resources.volumes.foo"),
+            },
+        })
+    })
 }

View File

@@ -1,16 +0,0 @@
-{
-  "properties": {
-    "unique_id": {
-      "type": "string",
-      "description": "Unique ID for job name"
-    },
-    "schema_name": {
-      "type": "string",
-      "description": "schema name to use in the artifact_path"
-    },
-    "volume_name": {
-      "type": "string",
-      "description": "volume name to use in the artifact_path"
-    }
-  }
-}

View File

@@ -1,14 +0,0 @@
-bundle:
-  name: artifact_path_with_volume
-
-workspace:
-  root_path: "~/.bundle/{{.unique_id}}"
-  artifact_path: /Volumes/main/{{.schema_name}}/{{.volume_name}}
-
-resources:
-  volumes:
-    foo:
-      catalog_name: main
-      name: my_volume
-      schema_name: {{.schema_name}}
-      volume_type: MANAGED