Consider serverless clusters as compatible for Python wheel tasks (#1733)

## Changes
Consider serverless clusters as compatible for Python wheel tasks.

This fixes the `Python wheel tasks require compute with DBR 13.3+ to include
local libraries` warning that was incorrectly shown for serverless clusters.
This commit is contained in:
Andrew Nester 2024-08-29 14:47:44 +02:00 committed by GitHub
parent 7dcc791b05
commit 43ace69bb9
Signature: no known key found for this signature in database
(GPG Key ID: B5690EEEBB952194)
2 changed files with 19 additions and 5 deletions

View File

@ -2,6 +2,7 @@ package python
import ( import (
"context" "context"
"strconv"
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
@ -38,7 +39,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
tasks := libraries.FindTasksWithLocalLibraries(b) tasks := libraries.FindTasksWithLocalLibraries(b)
for _, task := range tasks { for _, task := range tasks {
if task.NewCluster != nil { if task.NewCluster != nil {
if lowerThanExpectedVersion(ctx, task.NewCluster.SparkVersion) { if lowerThanExpectedVersion(task.NewCluster.SparkVersion) {
return true return true
} }
} }
@ -47,7 +48,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
for _, job := range b.Config.Resources.Jobs { for _, job := range b.Config.Resources.Jobs {
for _, cluster := range job.JobClusters { for _, cluster := range job.JobClusters {
if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" { if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" {
if lowerThanExpectedVersion(ctx, cluster.NewCluster.SparkVersion) { if lowerThanExpectedVersion(cluster.NewCluster.SparkVersion) {
return true return true
} }
} }
@ -64,7 +65,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false return false
} }
if lowerThanExpectedVersion(ctx, version) { if lowerThanExpectedVersion(version) {
return true return true
} }
} }
@ -73,7 +74,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false return false
} }
func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool { func lowerThanExpectedVersion(sparkVersion string) bool {
parts := strings.Split(sparkVersion, ".") parts := strings.Split(sparkVersion, ".")
if len(parts) < 2 { if len(parts) < 2 {
return false return false
@ -82,6 +83,17 @@ func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool {
if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99) if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99)
parts[1] = "99" parts[1] = "99"
} }
// if any of the version parts are not numbers, we can't compare
// so consider it as compatible version
if _, err := strconv.Atoi(parts[0]); err != nil {
return false
}
if _, err := strconv.Atoi(parts[1]); err != nil {
return false
}
v := "v" + parts[0] + "." + parts[1] v := "v" + parts[0] + "." + parts[1]
return semver.Compare(v, "v13.1") < 0 return semver.Compare(v, "v13.1") < 0
} }

View File

@ -344,6 +344,8 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
"14.1.x-scala2.12": false, "14.1.x-scala2.12": false,
"13.x-snapshot-scala-2.12": false, "13.x-snapshot-scala-2.12": false,
"13.x-rc-scala-2.12": false, "13.x-rc-scala-2.12": false,
"client.1.10-scala2.12": false,
"latest-stable-gpu-scala2.11": false,
"10.4.x-aarch64-photon-scala2.12": true, "10.4.x-aarch64-photon-scala2.12": true,
"10.4.x-scala2.12": true, "10.4.x-scala2.12": true,
"13.0.x-scala2.12": true, "13.0.x-scala2.12": true,
@ -351,7 +353,7 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
} }
for k, v := range testCases { for k, v := range testCases {
result := lowerThanExpectedVersion(context.Background(), k) result := lowerThanExpectedVersion(k)
require.Equal(t, v, result, k) require.Equal(t, v, result, k)
} }
} }