package trampoline
import (
"context"
2024-08-29 12:47:44 +00:00
"strconv"
2023-09-27 08:26:59 +00:00
"strings"
"github.com/databricks/cli/bundle"
2024-11-19 09:46:40 +00:00
"github.com/databricks/cli/bundle/config"
2023-09-27 08:26:59 +00:00
"github.com/databricks/cli/bundle/libraries"
2024-03-25 14:18:47 +00:00
"github.com/databricks/cli/libs/diag"
2023-09-29 12:19:05 +00:00
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
2023-09-27 08:26:59 +00:00
"golang.org/x/mod/semver"
)
// wrapperWarning is a mutator that validates Python wheel tasks against the
// compute they target and the Python wheel wrapper configuration.
type wrapperWarning struct{}

// WrapperWarning returns a mutator that warns or errors when Python wheel
// tasks are combined with incompatible settings or compute.
func WrapperWarning() bundle.Mutator {
	return &wrapperWarning{}
}
2024-03-25 14:18:47 +00:00
func ( m * wrapperWarning ) Apply ( ctx context . Context , b * bundle . Bundle ) diag . Diagnostics {
2023-10-20 12:32:04 +00:00
if isPythonWheelWrapperOn ( b ) {
2024-11-19 09:46:40 +00:00
if config . IsExplicitlyEnabled ( b . Config . Presets . SourceLinkedDeployment ) {
return diag . Warningf ( "Python wheel notebook wrapper is not available when using source-linked deployment mode. You can disable this mode by setting 'presets.source_linked_deployment: false'" )
}
2023-10-20 12:32:04 +00:00
return nil
}
2023-09-27 08:26:59 +00:00
if hasIncompatibleWheelTasks ( ctx , b ) {
2024-04-23 19:36:25 +00:00
return diag . Errorf ( "Python wheel tasks require compute with DBR 13.3+ to include local libraries. Please change your cluster configuration or use the experimental 'python_wheel_wrapper' setting. See https://docs.databricks.com/dev-tools/bundles/python-wheel.html for more information." )
2023-09-27 08:26:59 +00:00
}
return nil
}
2023-10-20 12:32:04 +00:00
func isPythonWheelWrapperOn ( b * bundle . Bundle ) bool {
return b . Config . Experimental != nil && b . Config . Experimental . PythonWheelWrapper
}
2023-09-27 08:26:59 +00:00
func hasIncompatibleWheelTasks ( ctx context . Context , b * bundle . Bundle ) bool {
2024-08-05 14:44:23 +00:00
tasks := libraries . FindTasksWithLocalLibraries ( b )
2023-09-27 08:26:59 +00:00
for _ , task := range tasks {
if task . NewCluster != nil {
2024-08-29 12:47:44 +00:00
if lowerThanExpectedVersion ( task . NewCluster . SparkVersion ) {
2023-09-27 08:26:59 +00:00
return true
}
}
if task . JobClusterKey != "" {
for _ , job := range b . Config . Resources . Jobs {
for _ , cluster := range job . JobClusters {
2024-04-03 10:39:53 +00:00
if task . JobClusterKey == cluster . JobClusterKey && cluster . NewCluster . SparkVersion != "" {
2024-08-29 12:47:44 +00:00
if lowerThanExpectedVersion ( cluster . NewCluster . SparkVersion ) {
2023-09-27 08:26:59 +00:00
return true
}
}
}
}
}
2023-09-29 12:19:05 +00:00
if task . ExistingClusterId != "" {
version , err := getSparkVersionForCluster ( ctx , b . WorkspaceClient ( ) , task . ExistingClusterId )
// If there's error getting spark version for cluster, do not mark it as incompatible
if err != nil {
log . Warnf ( ctx , "unable to get spark version for cluster %s, err: %s" , task . ExistingClusterId , err . Error ( ) )
return false
}
2024-08-29 12:47:44 +00:00
if lowerThanExpectedVersion ( version ) {
2023-09-29 12:19:05 +00:00
return true
}
}
2023-09-27 08:26:59 +00:00
}
return false
}
2024-08-29 12:47:44 +00:00
func lowerThanExpectedVersion ( sparkVersion string ) bool {
2023-09-27 08:26:59 +00:00
parts := strings . Split ( sparkVersion , "." )
if len ( parts ) < 2 {
return false
}
2023-10-23 08:19:26 +00:00
if parts [ 1 ] [ 0 ] == 'x' { // treat versions like 13.x as the very latest minor (13.99)
parts [ 1 ] = "99"
}
2024-08-29 12:47:44 +00:00
// if any of the version parts are not numbers, we can't compare
// so consider it as compatible version
if _ , err := strconv . Atoi ( parts [ 0 ] ) ; err != nil {
return false
}
if _ , err := strconv . Atoi ( parts [ 1 ] ) ; err != nil {
return false
}
2023-09-27 08:26:59 +00:00
v := "v" + parts [ 0 ] + "." + parts [ 1 ]
return semver . Compare ( v , "v13.1" ) < 0
}
// Name implements bundle.Mutator. It identifies this mutator in logs and
// diagnostics output.
func (m *wrapperWarning) Name() string {
	return "PythonWrapperWarning"
}
2023-09-29 12:19:05 +00:00
func getSparkVersionForCluster ( ctx context . Context , w * databricks . WorkspaceClient , clusterId string ) ( string , error ) {
details , err := w . Clusters . GetByClusterId ( ctx , clusterId )
if err != nil {
return "" , err
}
return details . SparkVersion , nil
}