package internal

import (
	"context"
	"path"
	"regexp"
	"strings"
	"testing"

	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/databricks-sdk-go"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)
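
// TestAccFsMkdirCreatesDirectory runs `fs mkdir` for a single directory inside
// a temporary DBFS root and asserts that the directory exists afterwards.
// The test is skipped unless CLOUD_ENV is set.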
func TestAccFsMkdirCreatesDirectory(t *testing.T) {
	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	tmpDir := temporaryDbfsDir(t, w)

	f, err := filer.NewDbfsClient(w, tmpDir)
	require.NoError(t, err)

	// create directory "a"
	stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "a"))
	assert.Equal(t, "", stderr.String())
	assert.Equal(t, "", stdout.String())

	// assert directory "a" is created
	info, err := f.Stat(ctx, "a")
	require.NoError(t, err)
	assert.Equal(t, "a", info.Name())
	assert.Equal(t, true, info.IsDir())
}
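
// TestAccFsMkdirCreatesMultipleDirectories verifies that `fs mkdir` creates
// intermediate directories: creating a/b/c also creates a and a/b.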
func TestAccFsMkdirCreatesMultipleDirectories(t *testing.T) {
	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	tmpDir := temporaryDbfsDir(t, w)

	f, err := filer.NewDbfsClient(w, tmpDir)
	require.NoError(t, err)

	// create directory /a/b/c
	stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "a", "b", "c"))
	assert.Equal(t, "", stderr.String())
	assert.Equal(t, "", stdout.String())

	// assert directory "a" is created
	infoA, err := f.Stat(ctx, "a")
	require.NoError(t, err)
	assert.Equal(t, "a", infoA.Name())
	assert.Equal(t, true, infoA.IsDir())

	// assert directory "b" is created
	infoB, err := f.Stat(ctx, "a/b")
	require.NoError(t, err)
	assert.Equal(t, "b", infoB.Name())
	assert.Equal(t, true, infoB.IsDir())

	// assert directory "c" is created
	infoC, err := f.Stat(ctx, "a/b/c")
	require.NoError(t, err)
	assert.Equal(t, "c", infoC.Name())
	assert.Equal(t, true, infoC.IsDir())
}
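
// TestAccFsMkdirWhenDirectoryAlreadyExists verifies that `fs mkdir` succeeds
// and prints nothing when the target directory already exists.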
func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) {
	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	tmpDir := temporaryDbfsDir(t, w)

	// create directory "a"
	f, err := filer.NewDbfsClient(w, tmpDir)
	require.NoError(t, err)
	err = f.Mkdir(ctx, "a")
	require.NoError(t, err)

	// assert run is successful without any errors
	stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "a"))
	assert.Equal(t, "", stderr.String())
	assert.Equal(t, "", stdout.String())
}
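
// TestAccFsMkdirWhenFileExistsAtPath verifies that `fs mkdir` fails when a
// regular file already exists at the target path.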
func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) {
	t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV"))

	ctx := context.Background()
	w, err := databricks.NewWorkspaceClient()
	require.NoError(t, err)

	tmpDir := temporaryDbfsDir(t, w)

	// create file "hello"
	f, err := filer.NewDbfsClient(w, tmpDir)
	require.NoError(t, err)
	err = f.Write(ctx, "hello", strings.NewReader("abc"))
	require.NoError(t, err)

	// assert run fails
	_, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello"))

	// Different cloud providers return different errors.
	regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`)
	assert.Regexp(t, regex, err.Error())
}