package bundle_test

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/integration/internal/acc"
	"github.com/databricks/cli/internal/testutil"
	"github.com/google/uuid"
	"github.com/stretchr/testify/require"
)

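// TestBasicBundleDeployWithFailOnActiveRuns verifies that a bundle built from
// the "basic" template can be deployed with the --fail-on-active-runs flag,
// and can be deployed again after the local .databricks state directory has
// been removed to simulate a fresh deployment.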
func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
	ctx, _ := acc.WorkspaceTest(t)

	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
	uniqueId := uuid.New().String()
	root := initTestTemplate(t, ctx, "basic", map[string]any{
		"unique_id":     uniqueId,
		"node_type_id":  nodeTypeId,
		"spark_version": defaultSparkVersion,
	})

	t.Cleanup(func() {
		destroyBundle(t, ctx, root)
	})

	// Deploy empty bundle.
	deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})

	// Remove .databricks directory to simulate a fresh deployment.
	require.NoError(t, os.RemoveAll(filepath.Join(root, ".databricks")))

	// Deploy empty bundle again.
	deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
}