mirror of https://github.com/databricks/cli.git
Enable testifylint and fix the issues (#2065)
## Changes

- Enable new linter: testifylint.
- Apply fixes with --fix.
- Fix remaining issues (mostly with aider).

There were 2 cases where --fix did the wrong thing - this seems to be a bug in the linter: https://github.com/Antonboom/testifylint/issues/210. Nonetheless, I kept that check enabled; it seems useful, it just needs to be fixed manually after autofix.

## Tests

Existing tests
This commit is contained in:
parent b053bfb4de
commit 0b80784df7
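For reference, a minimal sketch (illustrative only, not taken from this repository) of the kinds of rewrites the diff below applies; the comments name the corresponding testifylint checkers:

```go
package example

import (
	"errors"
	"io/fs"
	"testing"

	"github.com/stretchr/testify/assert"
)

// The "before" lines are patterns testifylint flags; the "after" lines are
// what its autofix rewrites them to.
func TestTestifylintRewrites(t *testing.T) {
	enabled := true
	items := []string{}
	err := fs.ErrNotExist
	price := 0.5

	// Before (flagged by testifylint):
	assert.Equal(t, true, enabled)                 // bool-compare
	assert.Len(t, items, 0)                        // empty
	assert.True(t, errors.Is(err, fs.ErrNotExist)) // error-is-as
	assert.Equal(t, 0.5, price)                    // float-compare

	// After (what --fix produces):
	assert.True(t, enabled)
	assert.Empty(t, items)
	assert.ErrorIs(t, err, fs.ErrNotExist)
	assert.InDelta(t, 0.5, price, 0.0001)
}
```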
@@ -11,6 +11,7 @@ linters:
- gofmt
- gofumpt
- goimports
- testifylint
linters-settings:
govet:
enable-all: true
@@ -32,7 +33,10 @@ linters-settings:
gofumpt:
module-path: github.com/databricks/cli
extra-rules: true
#goimports:
# local-prefixes: github.com/databricks/cli
testifylint:
enable-all: true
disable:
# good check, but we have too many assert.(No)?Errorf? so excluding for now
- require-error
issues:
exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/

@@ -2,7 +2,6 @@ package bundle

import (
"context"
"errors"
"io/fs"
"os"
"path/filepath"
@@ -16,7 +15,7 @@ import (

func TestLoadNotExists(t *testing.T) {
b, err := Load(context.Background(), "/doesntexist")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)
assert.Nil(t, b)
}

@@ -109,19 +109,19 @@ func TestConfigureDashboardDefaultsEmbedCredentials(t *testing.T) {
// Set to true; still true.
v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d1.embed_credentials")
if assert.NoError(t, err) {
assert.Equal(t, true, v.MustBool())
assert.True(t, v.MustBool())
}

// Set to false; still false.
v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d2.embed_credentials")
if assert.NoError(t, err) {
assert.Equal(t, false, v.MustBool())
assert.False(t, v.MustBool())
}

// Not set; now false.
v, err = dyn.Get(b.Config.Value(), "resources.dashboards.d3.embed_credentials")
if assert.NoError(t, err) {
assert.Equal(t, false, v.MustBool())
assert.False(t, v.MustBool())
}

// No valid dashboard; no change.

@@ -28,8 +28,8 @@ func TestDefaultQueueingApplyNoJobs(t *testing.T) {
},
}
d := bundle.Apply(context.Background(), b, DefaultQueueing())
assert.Len(t, d, 0)
assert.Len(t, b.Config.Resources.Jobs, 0)
assert.Empty(t, d)
assert.Empty(t, b.Config.Resources.Jobs)
}

func TestDefaultQueueingApplyJobsAlreadyEnabled(t *testing.T) {
@@ -47,7 +47,7 @@ func TestDefaultQueueingApplyJobsAlreadyEnabled(t *testing.T) {
},
}
d := bundle.Apply(context.Background(), b, DefaultQueueing())
assert.Len(t, d, 0)
assert.Empty(t, d)
assert.True(t, b.Config.Resources.Jobs["job"].Queue.Enabled)
}

@@ -66,7 +66,7 @@ func TestDefaultQueueingApplyEnableQueueing(t *testing.T) {
},
}
d := bundle.Apply(context.Background(), b, DefaultQueueing())
assert.Len(t, d, 0)
assert.Empty(t, d)
assert.NotNil(t, b.Config.Resources.Jobs["job"].Queue)
assert.True(t, b.Config.Resources.Jobs["job"].Queue.Enabled)
}
@@ -96,7 +96,7 @@ func TestDefaultQueueingApplyWithMultipleJobs(t *testing.T) {
},
}
d := bundle.Apply(context.Background(), b, DefaultQueueing())
assert.Len(t, d, 0)
assert.Empty(t, d)
assert.False(t, b.Config.Resources.Jobs["job1"].Queue.Enabled)
assert.True(t, b.Config.Resources.Jobs["job2"].Queue.Enabled)
assert.True(t, b.Config.Resources.Jobs["job3"].Queue.Enabled)

@@ -44,7 +44,7 @@ func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) {

diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
require.NoError(t, diags.Error())
assert.Len(t, b.Config.Environments, 0)
assert.Empty(t, b.Config.Environments)
assert.Len(t, b.Config.Targets, 1)
}

@@ -61,6 +61,6 @@ func TestEnvironmentsToTargetsWithTargetsDefined(t *testing.T) {

diags := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
require.NoError(t, diags.Error())
assert.Len(t, b.Config.Environments, 0)
assert.Empty(t, b.Config.Environments)
assert.Len(t, b.Config.Targets, 1)
}

@@ -74,8 +74,8 @@ func TestMergeJobTasks(t *testing.T) {
assert.Equal(t, "i3.2xlarge", cluster.NodeTypeId)
assert.Equal(t, 4, cluster.NumWorkers)
assert.Len(t, task0.Libraries, 2)
assert.Equal(t, task0.Libraries[0].Whl, "package1")
assert.Equal(t, task0.Libraries[1].Pypi.Package, "package2")
assert.Equal(t, "package1", task0.Libraries[0].Whl)
assert.Equal(t, "package2", task0.Libraries[1].Pypi.Package)

// This task was left untouched.
task1 := j.Tasks[1].NewCluster

@@ -163,18 +163,18 @@ func TestProcessTargetModeDevelopment(t *testing.T) {

// Job 1
assert.Equal(t, "[dev lennart] job1", b.Config.Resources.Jobs["job1"].Name)
assert.Equal(t, b.Config.Resources.Jobs["job1"].Tags["existing"], "tag")
assert.Equal(t, b.Config.Resources.Jobs["job1"].Tags["dev"], "lennart")
assert.Equal(t, b.Config.Resources.Jobs["job1"].Schedule.PauseStatus, jobs.PauseStatusPaused)
assert.Equal(t, "tag", b.Config.Resources.Jobs["job1"].Tags["existing"])
assert.Equal(t, "lennart", b.Config.Resources.Jobs["job1"].Tags["dev"])
assert.Equal(t, jobs.PauseStatusPaused, b.Config.Resources.Jobs["job1"].Schedule.PauseStatus)

// Job 2
assert.Equal(t, "[dev lennart] job2", b.Config.Resources.Jobs["job2"].Name)
assert.Equal(t, b.Config.Resources.Jobs["job2"].Tags["dev"], "lennart")
assert.Equal(t, b.Config.Resources.Jobs["job2"].Schedule.PauseStatus, jobs.PauseStatusUnpaused)
assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
assert.Equal(t, jobs.PauseStatusUnpaused, b.Config.Resources.Jobs["job2"].Schedule.PauseStatus)

// Pipeline 1
assert.Equal(t, "[dev lennart] pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
assert.Equal(t, false, b.Config.Resources.Pipelines["pipeline1"].Continuous)
assert.False(t, b.Config.Resources.Pipelines["pipeline1"].Continuous)
assert.True(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)

// Experiment 1

@@ -185,11 +185,11 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
// Apply for the variable prefix. This should resolve the variables to their values.
diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("variables"))
require.NoError(t, diags.Error())
assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForCanceledRuns)
assert.Equal(t, true, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForSkippedRuns)
assert.True(t, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForCanceledRuns)
assert.True(t, b.Config.Resources.Jobs["job1"].JobSettings.NotificationSettings.NoAlertForSkippedRuns)
assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MinWorkers)
assert.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MaxWorkers)
assert.Equal(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice)
assert.InDelta(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice, 0.0001)
}

func TestResolveComplexVariable(t *testing.T) {

@@ -71,7 +71,7 @@ func TestNoWorkspacePrefixUsed(t *testing.T) {
}

for _, d := range diags {
require.Equal(t, d.Severity, diag.Warning)
require.Equal(t, diag.Warning, d.Severity)
require.Contains(t, expectedErrors, d.Summary)
delete(expectedErrors, d.Summary)
}

@@ -30,7 +30,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {

err = convert.ToTyped(&variable, v)
require.NoError(t, err)
assert.Equal(t, variable.Value, "process-env")
assert.Equal(t, "process-env", variable.Value)
}

func TestSetVariableUsingDefaultValue(t *testing.T) {
@@ -48,7 +48,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) {

err = convert.ToTyped(&variable, v)
require.NoError(t, err)
assert.Equal(t, variable.Value, "default")
assert.Equal(t, "default", variable.Value)
}

func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
@@ -70,7 +70,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {

err = convert.ToTyped(&variable, v)
require.NoError(t, err)
assert.Equal(t, variable.Value, "assigned-value")
assert.Equal(t, "assigned-value", variable.Value)
}

func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
@@ -95,7 +95,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {

err = convert.ToTyped(&variable, v)
require.NoError(t, err)
assert.Equal(t, variable.Value, "assigned-value")
assert.Equal(t, "assigned-value", variable.Value)
}

func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {

@@ -37,11 +37,11 @@ func TestCustomMarshallerIsImplemented(t *testing.T) {
field := rt.Field(i)

// Fields in Resources are expected be of the form map[string]*resourceStruct
assert.Equal(t, field.Type.Kind(), reflect.Map, "Resource %s is not a map", field.Name)
assert.Equal(t, reflect.Map, field.Type.Kind(), "Resource %s is not a map", field.Name)
kt := field.Type.Key()
assert.Equal(t, kt.Kind(), reflect.String, "Resource %s is not a map with string keys", field.Name)
assert.Equal(t, reflect.String, kt.Kind(), "Resource %s is not a map with string keys", field.Name)
vt := field.Type.Elem()
assert.Equal(t, vt.Kind(), reflect.Ptr, "Resource %s is not a map with pointer values", field.Name)
assert.Equal(t, reflect.Ptr, vt.Kind(), "Resource %s is not a map with pointer values", field.Name)

// Marshalling a resourceStruct will panic if resourceStruct does not have a custom marshaller
// This is because resourceStruct embeds a Go SDK struct that implements

@@ -87,7 +87,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) {
ctx := context.Background()
rb := bundle.ReadOnly(b)
diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync())
require.Equal(t, 1, len(diags))
require.Len(t, diags, 1)
assert.Equal(t, diag.Warning, diags[0].Severity)
assert.Equal(t, "There are no files to sync, please check your .gitignore", diags[0].Summary)
}
@@ -101,7 +101,7 @@ func TestFilesToSync_EverythingExcluded(t *testing.T) {
ctx := context.Background()
rb := bundle.ReadOnly(b)
diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync())
require.Equal(t, 1, len(diags))
require.Len(t, diags, 1)
assert.Equal(t, diag.Warning, diags[0].Severity)
assert.Equal(t, "There are no files to sync, please check your .gitignore and sync.exclude configuration", diags[0].Summary)
}

@@ -34,7 +34,7 @@ func TestJobClusterKeyDefined(t *testing.T) {
}

diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined())
require.Len(t, diags, 0)
require.Empty(t, diags)
require.NoError(t, diags.Error())
}

@@ -59,8 +59,8 @@ func TestJobClusterKeyNotDefined(t *testing.T) {
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined())
require.Len(t, diags, 1)
require.NoError(t, diags.Error())
require.Equal(t, diags[0].Severity, diag.Warning)
require.Equal(t, diags[0].Summary, "job_cluster_key do-not-exist is not defined")
require.Equal(t, diag.Warning, diags[0].Severity)
require.Equal(t, "job_cluster_key do-not-exist is not defined", diags[0].Summary)
}

func TestJobClusterKeyDefinedInDifferentJob(t *testing.T) {
@@ -92,6 +92,6 @@ func TestJobClusterKeyDefinedInDifferentJob(t *testing.T) {
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined())
require.Len(t, diags, 1)
require.NoError(t, diags.Error())
require.Equal(t, diags[0].Severity, diag.Warning)
require.Equal(t, diags[0].Summary, "job_cluster_key do-not-exist is not defined")
require.Equal(t, diag.Warning, diags[0].Severity)
require.Equal(t, "job_cluster_key do-not-exist is not defined", diags[0].Summary)
}

@ -12,7 +12,7 @@ func TestGetPanics(t *testing.T) {
|
|||
defer func() {
|
||||
r := recover()
|
||||
require.NotNil(t, r, "The function did not panic")
|
||||
assert.Equal(t, r, "context not configured with bundle")
|
||||
assert.Equal(t, "context not configured with bundle", r)
|
||||
}()
|
||||
|
||||
Get(context.Background())
|
||||
|
|
|
@ -4,7 +4,6 @@ import (
|
|||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"io/fs"
|
||||
"os"
|
||||
|
@ -279,7 +278,7 @@ func TestStatePullNoState(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Stat(statePath)
|
||||
require.True(t, errors.Is(err, fs.ErrNotExist))
|
||||
require.ErrorIs(t, err, fs.ErrNotExist)
|
||||
}
|
||||
|
||||
func TestStatePullOlderState(t *testing.T) {
|
||||
|
|
|
@ -60,7 +60,7 @@ func TestStateUpdate(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, int64(1), state.Seq)
|
||||
require.Equal(t, state.Files, Filelist{
|
||||
require.Equal(t, Filelist{
|
||||
{
|
||||
LocalPath: "test1.py",
|
||||
},
|
||||
|
@ -68,7 +68,7 @@ func TestStateUpdate(t *testing.T) {
|
|||
LocalPath: "test2.py",
|
||||
IsNotebook: true,
|
||||
},
|
||||
})
|
||||
}, state.Files)
|
||||
require.Equal(t, build.GetInfo().Version, state.CliVersion)
|
||||
|
||||
diags = bundle.Apply(ctx, b, s)
|
||||
|
@ -79,7 +79,7 @@ func TestStateUpdate(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, int64(2), state.Seq)
|
||||
require.Equal(t, state.Files, Filelist{
|
||||
require.Equal(t, Filelist{
|
||||
{
|
||||
LocalPath: "test1.py",
|
||||
},
|
||||
|
@ -87,7 +87,7 @@ func TestStateUpdate(t *testing.T) {
|
|||
LocalPath: "test2.py",
|
||||
IsNotebook: true,
|
||||
},
|
||||
})
|
||||
}, state.Files)
|
||||
require.Equal(t, build.GetInfo().Version, state.CliVersion)
|
||||
|
||||
// Valid non-empty UUID is generated.
|
||||
|
@ -130,7 +130,7 @@ func TestStateUpdateWithExistingState(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, int64(11), state.Seq)
|
||||
require.Equal(t, state.Files, Filelist{
|
||||
require.Equal(t, Filelist{
|
||||
{
|
||||
LocalPath: "test1.py",
|
||||
},
|
||||
|
@ -138,7 +138,7 @@ func TestStateUpdateWithExistingState(t *testing.T) {
|
|||
LocalPath: "test2.py",
|
||||
IsNotebook: true,
|
||||
},
|
||||
})
|
||||
}, state.Files)
|
||||
require.Equal(t, build.GetInfo().Version, state.CliVersion)
|
||||
|
||||
// Existing UUID is not overwritten.
|
||||
|
|
|
@ -254,10 +254,10 @@ func TestBundleToTerraformPipeline(t *testing.T) {
|
|||
assert.Equal(t, "my pipeline", resource.Name)
|
||||
assert.Len(t, resource.Library, 2)
|
||||
assert.Len(t, resource.Notification, 2)
|
||||
assert.Equal(t, resource.Notification[0].Alerts, []string{"on-update-fatal-failure"})
|
||||
assert.Equal(t, resource.Notification[0].EmailRecipients, []string{"jane@doe.com"})
|
||||
assert.Equal(t, resource.Notification[1].Alerts, []string{"on-update-failure", "on-flow-failure"})
|
||||
assert.Equal(t, resource.Notification[1].EmailRecipients, []string{"jane@doe.com", "john@doe.com"})
|
||||
assert.Equal(t, []string{"on-update-fatal-failure"}, resource.Notification[0].Alerts)
|
||||
assert.Equal(t, []string{"jane@doe.com"}, resource.Notification[0].EmailRecipients)
|
||||
assert.Equal(t, []string{"on-update-failure", "on-flow-failure"}, resource.Notification[1].Alerts)
|
||||
assert.Equal(t, []string{"jane@doe.com", "john@doe.com"}, resource.Notification[1].EmailRecipients)
|
||||
assert.Nil(t, out.Data)
|
||||
}
|
||||
|
||||
|
@ -454,7 +454,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
|
|||
assert.Equal(t, "name", resource.Name)
|
||||
assert.Equal(t, "model_name", resource.Config.ServedModels[0].ModelName)
|
||||
assert.Equal(t, "1", resource.Config.ServedModels[0].ModelVersion)
|
||||
assert.Equal(t, true, resource.Config.ServedModels[0].ScaleToZeroEnabled)
|
||||
assert.True(t, resource.Config.ServedModels[0].ScaleToZeroEnabled)
|
||||
assert.Equal(t, "Small", resource.Config.ServedModels[0].WorkloadSize)
|
||||
assert.Equal(t, "model_name-1", resource.Config.TrafficConfig.Routes[0].ServedModelName)
|
||||
assert.Equal(t, 100, resource.Config.TrafficConfig.Routes[0].TrafficPercentage)
|
||||
|
|
|
@ -225,7 +225,7 @@ func TestSetProxyEnvVars(t *testing.T) {
|
|||
env := make(map[string]string, 0)
|
||||
err := setProxyEnvVars(context.Background(), env, b)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, env, 0)
|
||||
assert.Empty(t, env)
|
||||
|
||||
// Lower case set.
|
||||
clearEnv()
|
||||
|
@ -293,7 +293,7 @@ func TestSetUserProfileFromInheritEnvVars(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
|
||||
assert.Contains(t, env, "USERPROFILE")
|
||||
assert.Equal(t, env["USERPROFILE"], "c:\\foo\\c")
|
||||
assert.Equal(t, "c:\\foo\\c", env["USERPROFILE"])
|
||||
}
|
||||
|
||||
func TestInheritEnvVarsWithAbsentTFConfigFile(t *testing.T) {
|
||||
|
|
|
@ -2,7 +2,6 @@ package main
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
|
@ -80,7 +79,7 @@ func TestRequiredAnnotationsForNewFields(t *testing.T) {
|
|||
},
|
||||
})
|
||||
assert.NoError(t, err)
|
||||
assert.Empty(t, updatedFieldPaths, fmt.Sprintf("Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml:\n%s", strings.Join(updatedFieldPaths, "\n")))
|
||||
assert.Empty(t, updatedFieldPaths, "Missing JSON-schema descriptions for new config fields in bundle/internal/schema/annotations.yml:\n%s", strings.Join(updatedFieldPaths, "\n"))
|
||||
}
|
||||
|
||||
// Checks whether types in annotation files are still present in Config type
|
||||
|
|
|
@ -212,12 +212,12 @@ func TestFilerForVolumeWithInvalidVolumePaths(t *testing.T) {
|
|||
bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
|
||||
|
||||
_, _, diags := GetFilerForLibraries(context.Background(), b)
|
||||
require.Equal(t, diags, diag.Diagnostics{{
|
||||
require.Equal(t, diag.Diagnostics{{
|
||||
Severity: diag.Error,
|
||||
Summary: fmt.Sprintf("expected UC volume path to be in the format /Volumes/<catalog>/<schema>/<volume>/..., got %s", p),
|
||||
Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
|
||||
Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
|
||||
}})
|
||||
}}, diags)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,25 +12,25 @@ func TestLibraryPath(t *testing.T) {
|
|||
|
||||
p, err := libraryPath(&compute.Library{Whl: path})
|
||||
assert.Equal(t, path, p)
|
||||
assert.Nil(t, err)
|
||||
assert.NoError(t, err)
|
||||
|
||||
p, err = libraryPath(&compute.Library{Jar: path})
|
||||
assert.Equal(t, path, p)
|
||||
assert.Nil(t, err)
|
||||
assert.NoError(t, err)
|
||||
|
||||
p, err = libraryPath(&compute.Library{Egg: path})
|
||||
assert.Equal(t, path, p)
|
||||
assert.Nil(t, err)
|
||||
assert.NoError(t, err)
|
||||
|
||||
p, err = libraryPath(&compute.Library{Requirements: path})
|
||||
assert.Equal(t, path, p)
|
||||
assert.Nil(t, err)
|
||||
assert.NoError(t, err)
|
||||
|
||||
p, err = libraryPath(&compute.Library{})
|
||||
assert.Equal(t, "", p)
|
||||
assert.NotNil(t, err)
|
||||
assert.Error(t, err)
|
||||
|
||||
p, err = libraryPath(&compute.Library{Pypi: &compute.PythonPyPiLibrary{Package: "pypipackage"}})
|
||||
assert.Equal(t, "", p)
|
||||
assert.NotNil(t, err)
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
|
|
@ -99,32 +99,32 @@ func TestFilterCurrentUser(t *testing.T) {
|
|||
assert.NoError(t, diags.Error())
|
||||
|
||||
// Assert current user is filtered out.
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Jobs["job1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Jobs["job1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job1"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Jobs["job2"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Jobs["job2"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job2"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job2"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Pipelines["pipeline1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Experiments["experiment1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Experiments["experiment1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Experiments["experiment1"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.Experiments["experiment1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Models["model1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Models["model1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Models["model1"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.Models["model1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, robot)
|
||||
assert.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, bob)
|
||||
|
||||
// Assert there's no change to the grant.
|
||||
assert.Equal(t, 1, len(b.Config.Resources.RegisteredModels["registered_model1"].Grants))
|
||||
assert.Len(t, b.Config.Resources.RegisteredModels["registered_model1"].Grants, 1)
|
||||
}
|
||||
|
||||
func TestFilterCurrentServicePrincipal(t *testing.T) {
|
||||
|
@ -134,32 +134,32 @@ func TestFilterCurrentServicePrincipal(t *testing.T) {
|
|||
assert.NoError(t, diags.Error())
|
||||
|
||||
// Assert current user is filtered out.
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Jobs["job1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Jobs["job1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job1"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Jobs["job2"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Jobs["job2"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job2"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.Jobs["job2"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Pipelines["pipeline1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.Pipelines["pipeline1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Experiments["experiment1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Experiments["experiment1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Experiments["experiment1"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.Experiments["experiment1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.Models["model1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.Models["model1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.Models["model1"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.Models["model1"].Permissions, bob)
|
||||
|
||||
assert.Equal(t, 2, len(b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions))
|
||||
assert.Len(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, 2)
|
||||
assert.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, alice)
|
||||
assert.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint1"].Permissions, bob)
|
||||
|
||||
// Assert there's no change to the grant.
|
||||
assert.Equal(t, 1, len(b.Config.Resources.RegisteredModels["registered_model1"].Grants))
|
||||
assert.Len(t, b.Config.Resources.RegisteredModels["registered_model1"].Grants, 1)
|
||||
}
|
||||
|
||||
func TestFilterCurrentUserDoesNotErrorWhenNoResources(t *testing.T) {
|
||||
|
|
|
@ -164,7 +164,7 @@ func TestAllResourcesExplicitlyDefinedForPermissionsSupport(t *testing.T) {
|
|||
|
||||
for _, resource := range unsupportedResources {
|
||||
_, ok := levelsMap[resource]
|
||||
assert.False(t, ok, fmt.Sprintf("Resource %s is defined in both levelsMap and unsupportedResources", resource))
|
||||
assert.False(t, ok, "Resource %s is defined in both levelsMap and unsupportedResources", resource)
|
||||
}
|
||||
|
||||
for _, resource := range r.AllResources() {
|
||||
|
|
|
@ -28,7 +28,7 @@ func TestPermissionDiagnosticsApplyFail(t *testing.T) {
|
|||
})
|
||||
|
||||
diags := permissions.PermissionDiagnostics().Apply(context.Background(), b)
|
||||
require.Equal(t, diags[0].Severity, diag.Warning)
|
||||
require.Equal(t, diag.Warning, diags[0].Severity)
|
||||
require.Contains(t, diags[0].Summary, "permissions section should include testuser@databricks.com or one of their groups with CAN_MANAGE permissions")
|
||||
}
|
||||
|
||||
|
|
|
@ -54,7 +54,7 @@ func TestConvertPythonParams(t *testing.T) {
|
|||
err = runner.convertPythonParams(opts)
|
||||
require.NoError(t, err)
|
||||
require.Contains(t, opts.Job.notebookParams, "__python_params")
|
||||
require.Equal(t, opts.Job.notebookParams["__python_params"], `["param1","param2","param3"]`)
|
||||
require.Equal(t, `["param1","param2","param3"]`, opts.Job.notebookParams["__python_params"])
|
||||
}
|
||||
|
||||
func TestJobRunnerCancel(t *testing.T) {
|
||||
|
|
|
@ -30,7 +30,7 @@ func TestComplexVariables(t *testing.T) {
|
|||
require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
|
||||
require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"])
|
||||
|
||||
require.Equal(t, 3, len(b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries))
|
||||
require.Len(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, 3)
|
||||
require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
|
||||
Jar: "/path/to/jar",
|
||||
})
|
||||
|
|
|
@ -2,7 +2,6 @@ package config_tests
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
|
@ -16,7 +15,7 @@ func TestGitAutoLoadWithEnvironment(t *testing.T) {
|
|||
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
|
||||
assert.True(t, b.Config.Bundle.Git.Inferred)
|
||||
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
|
||||
assert.True(t, validUrl, fmt.Sprintf("Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL))
|
||||
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
|
||||
}
|
||||
|
||||
func TestGitManuallySetBranchWithEnvironment(t *testing.T) {
|
||||
|
@ -25,5 +24,5 @@ func TestGitManuallySetBranchWithEnvironment(t *testing.T) {
|
|||
assert.False(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
|
||||
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
|
||||
assert.True(t, validUrl, fmt.Sprintf("Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL))
|
||||
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
|
||||
}
|
||||
|
|
|
@ -21,8 +21,8 @@ func TestEnvironmentOverridesResourcesDev(t *testing.T) {
|
|||
assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name)
|
||||
|
||||
// Base values are preserved in the development environment.
|
||||
assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon)
|
||||
assert.Equal(t, false, b.Config.Resources.Pipelines["boolean2"].Photon)
|
||||
assert.True(t, b.Config.Resources.Pipelines["boolean1"].Photon)
|
||||
assert.False(t, b.Config.Resources.Pipelines["boolean2"].Photon)
|
||||
}
|
||||
|
||||
func TestEnvironmentOverridesResourcesStaging(t *testing.T) {
|
||||
|
@ -30,6 +30,6 @@ func TestEnvironmentOverridesResourcesStaging(t *testing.T) {
|
|||
assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name)
|
||||
|
||||
// Override values are applied in the staging environment.
|
||||
assert.Equal(t, false, b.Config.Resources.Pipelines["boolean1"].Photon)
|
||||
assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon)
|
||||
assert.False(t, b.Config.Resources.Pipelines["boolean1"].Photon)
|
||||
assert.True(t, b.Config.Resources.Pipelines["boolean2"].Photon)
|
||||
}
|
||||
|
|
|
@ -10,11 +10,11 @@ import (
|
|||
|
||||
func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_job_and_pipeline", "development")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Empty(t, b.Config.Resources.Jobs)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.Equal(t, config.Development, b.Config.Bundle.Mode)
|
||||
assert.True(t, p.Development)
|
||||
require.Len(t, p.Libraries, 1)
|
||||
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
|
||||
|
@ -23,7 +23,7 @@ func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) {
|
|||
|
||||
func TestJobAndPipelineStagingWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_job_and_pipeline", "staging")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Empty(t, b.Config.Resources.Jobs)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
|
|
|
@ -2,7 +2,6 @@ package config_tests
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
|
@ -17,7 +16,7 @@ func TestGitAutoLoad(t *testing.T) {
|
|||
bundle.Apply(context.Background(), b, mutator.LoadGitDetails())
|
||||
assert.True(t, b.Config.Bundle.Git.Inferred)
|
||||
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
|
||||
assert.True(t, validUrl, fmt.Sprintf("Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL))
|
||||
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
|
||||
}
|
||||
|
||||
func TestGitManuallySetBranch(t *testing.T) {
|
||||
|
@ -26,7 +25,7 @@ func TestGitManuallySetBranch(t *testing.T) {
|
|||
assert.False(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
|
||||
validUrl := strings.Contains(b.Config.Bundle.Git.OriginURL, "/cli") || strings.Contains(b.Config.Bundle.Git.OriginURL, "/bricks")
|
||||
assert.True(t, validUrl, fmt.Sprintf("Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL))
|
||||
assert.True(t, validUrl, "Expected URL to contain '/cli' or '/bricks', got %s", b.Config.Bundle.Git.OriginURL)
|
||||
}
|
||||
|
||||
func TestGitBundleBranchValidation(t *testing.T) {
|
||||
|
|
|
@ -35,7 +35,7 @@ func TestIssue1828(t *testing.T) {
|
|||
}
|
||||
|
||||
if assert.Contains(t, b.Config.Variables, "float") {
|
||||
assert.Equal(t, 3.14, b.Config.Variables["float"].Default)
|
||||
assert.InDelta(t, 3.14, b.Config.Variables["float"].Default, 0.0001)
|
||||
}
|
||||
|
||||
if assert.Contains(t, b.Config.Variables, "time") {
|
||||
|
@ -43,6 +43,6 @@ func TestIssue1828(t *testing.T) {
|
|||
}
|
||||
|
||||
if assert.Contains(t, b.Config.Variables, "nil") {
|
||||
assert.Equal(t, nil, b.Config.Variables["nil"].Default)
|
||||
assert.Nil(t, b.Config.Variables["nil"].Default)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,11 +10,11 @@ import (
|
|||
|
||||
func TestJobAndPipelineDevelopment(t *testing.T) {
|
||||
b := loadTarget(t, "./job_and_pipeline", "development")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Empty(t, b.Config.Resources.Jobs)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.Equal(t, config.Development, b.Config.Bundle.Mode)
|
||||
assert.True(t, p.Development)
|
||||
require.Len(t, p.Libraries, 1)
|
||||
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
|
||||
|
@ -23,7 +23,7 @@ func TestJobAndPipelineDevelopment(t *testing.T) {
|
|||
|
||||
func TestJobAndPipelineStaging(t *testing.T) {
|
||||
b := loadTarget(t, "./job_and_pipeline", "staging")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Empty(t, b.Config.Resources.Jobs)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
|
|
|
@ -16,13 +16,13 @@ func TestJobClusterKeyNotDefinedTest(t *testing.T) {
|
|||
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.JobClusterKeyDefined())
|
||||
require.Len(t, diags, 1)
|
||||
require.NoError(t, diags.Error())
|
||||
require.Equal(t, diags[0].Severity, diag.Warning)
|
||||
require.Equal(t, diags[0].Summary, "job_cluster_key key is not defined")
|
||||
require.Equal(t, diag.Warning, diags[0].Severity)
|
||||
require.Equal(t, "job_cluster_key key is not defined", diags[0].Summary)
|
||||
}
|
||||
|
||||
func TestJobClusterKeyDefinedTest(t *testing.T) {
|
||||
b := loadTarget(t, "./job_cluster_key", "development")
|
||||
|
||||
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.JobClusterKeyDefined())
|
||||
require.Len(t, diags, 0)
|
||||
require.Empty(t, diags)
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ func assertExpected(t *testing.T, p *resources.ModelServingEndpoint) {
|
|||
func TestModelServingEndpointDevelopment(t *testing.T) {
|
||||
b := loadTarget(t, "./model_serving_endpoint", "development")
|
||||
assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1)
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.Equal(t, config.Development, b.Config.Bundle.Mode)
|
||||
|
||||
p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"]
|
||||
assert.Equal(t, "my-dev-endpoint", p.Name)
|
||||
|
|
|
@ -12,14 +12,14 @@ func TestOverrideTasksDev(t *testing.T) {
|
|||
assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 2)
|
||||
|
||||
tasks := b.Config.Resources.Jobs["foo"].Tasks
|
||||
assert.Equal(t, tasks[0].TaskKey, "key1")
|
||||
assert.Equal(t, tasks[0].NewCluster.NodeTypeId, "i3.xlarge")
|
||||
assert.Equal(t, tasks[0].NewCluster.NumWorkers, 1)
|
||||
assert.Equal(t, tasks[0].SparkPythonTask.PythonFile, "./test1.py")
|
||||
assert.Equal(t, "key1", tasks[0].TaskKey)
|
||||
assert.Equal(t, "i3.xlarge", tasks[0].NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 1, tasks[0].NewCluster.NumWorkers)
|
||||
assert.Equal(t, "./test1.py", tasks[0].SparkPythonTask.PythonFile)
|
||||
|
||||
assert.Equal(t, tasks[1].TaskKey, "key2")
|
||||
assert.Equal(t, tasks[1].NewCluster.SparkVersion, "13.3.x-scala2.12")
|
||||
assert.Equal(t, tasks[1].SparkPythonTask.PythonFile, "./test2.py")
|
||||
assert.Equal(t, "key2", tasks[1].TaskKey)
|
||||
assert.Equal(t, "13.3.x-scala2.12", tasks[1].NewCluster.SparkVersion)
|
||||
assert.Equal(t, "./test2.py", tasks[1].SparkPythonTask.PythonFile)
|
||||
}
|
||||
|
||||
func TestOverrideTasksStaging(t *testing.T) {
|
||||
|
@ -28,12 +28,12 @@ func TestOverrideTasksStaging(t *testing.T) {
|
|||
assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 2)
|
||||
|
||||
tasks := b.Config.Resources.Jobs["foo"].Tasks
|
||||
assert.Equal(t, tasks[0].TaskKey, "key1")
|
||||
assert.Equal(t, tasks[0].NewCluster.SparkVersion, "13.3.x-scala2.12")
|
||||
assert.Equal(t, tasks[0].SparkPythonTask.PythonFile, "./test1.py")
|
||||
assert.Equal(t, "key1", tasks[0].TaskKey)
|
||||
assert.Equal(t, "13.3.x-scala2.12", tasks[0].NewCluster.SparkVersion)
|
||||
assert.Equal(t, "./test1.py", tasks[0].SparkPythonTask.PythonFile)
|
||||
|
||||
assert.Equal(t, tasks[1].TaskKey, "key2")
|
||||
assert.Equal(t, tasks[1].NewCluster.NodeTypeId, "i3.2xlarge")
|
||||
assert.Equal(t, tasks[1].NewCluster.NumWorkers, 4)
|
||||
assert.Equal(t, tasks[1].SparkPythonTask.PythonFile, "./test3.py")
|
||||
assert.Equal(t, "key2", tasks[1].TaskKey)
|
||||
assert.Equal(t, "i3.2xlarge", tasks[1].NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 4, tasks[1].NewCluster.NumWorkers)
|
||||
assert.Equal(t, "./test3.py", tasks[1].SparkPythonTask.PythonFile)
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ func TestPresetsDev(t *testing.T) {
|
|||
assert.Equal(t, "myprefix", b.Config.Presets.NamePrefix)
|
||||
assert.Equal(t, config.Paused, b.Config.Presets.TriggerPauseStatus)
|
||||
assert.Equal(t, 10, b.Config.Presets.JobsMaxConcurrentRuns)
|
||||
assert.Equal(t, true, *b.Config.Presets.PipelinesDevelopment)
|
||||
assert.True(t, *b.Config.Presets.PipelinesDevelopment)
|
||||
assert.Equal(t, "true", b.Config.Presets.Tags["dev"])
|
||||
assert.Equal(t, "finance", b.Config.Presets.Tags["team"])
|
||||
assert.Equal(t, "false", b.Config.Presets.Tags["prod"])
|
||||
|
@ -22,7 +22,7 @@ func TestPresetsDev(t *testing.T) {
|
|||
func TestPresetsProd(t *testing.T) {
|
||||
b := loadTarget(t, "./presets", "prod")
|
||||
|
||||
assert.Equal(t, false, *b.Config.Presets.PipelinesDevelopment)
|
||||
assert.False(t, *b.Config.Presets.PipelinesDevelopment)
|
||||
assert.Equal(t, "finance", b.Config.Presets.Tags["team"])
|
||||
assert.Equal(t, "true", b.Config.Presets.Tags["prod"])
|
||||
}
|
||||
|
|
|
@ -23,7 +23,7 @@ func TestPythonWheelBuild(t *testing.T) {
|
|||
|
||||
matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
require.Len(t, matches, 1)
|
||||
|
||||
match := libraries.ExpandGlobReferences()
|
||||
diags = bundle.Apply(ctx, b, match)
|
||||
|
@ -39,7 +39,7 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) {
|
|||
|
||||
matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
require.Len(t, matches, 1)
|
||||
|
||||
match := libraries.ExpandGlobReferences()
|
||||
diags = bundle.Apply(ctx, b, match)
|
||||
|
@ -55,7 +55,7 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
|
|||
|
||||
matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
require.Len(t, matches, 1)
|
||||
|
||||
match := libraries.ExpandGlobReferences()
|
||||
diags = bundle.Apply(ctx, b, match)
|
||||
|
@ -108,7 +108,7 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
|
|||
|
||||
matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
require.Len(t, matches, 1)
|
||||
|
||||
match := libraries.ExpandGlobReferences()
|
||||
diags = bundle.Apply(ctx, b, match)
|
||||
|
@ -124,7 +124,7 @@ func TestPythonWheelBuildMultiple(t *testing.T) {
|
|||
|
||||
matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(matches))
|
||||
require.Len(t, matches, 2)
|
||||
|
||||
match := libraries.ExpandGlobReferences()
|
||||
diags = bundle.Apply(ctx, b, match)
|
||||
|
|
|
@ -19,7 +19,7 @@ func assertExpectedMonitor(t *testing.T, p *resources.QualityMonitor) {
|
|||
func TestMonitorTableNames(t *testing.T) {
|
||||
b := loadTarget(t, "./quality_monitor", "development")
|
||||
assert.Len(t, b.Config.Resources.QualityMonitors, 1)
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.Equal(t, config.Development, b.Config.Bundle.Mode)
|
||||
|
||||
p := b.Config.Resources.QualityMonitors["my_monitor"]
|
||||
assert.Equal(t, "main.test.dev", p.TableName)
|
||||
|
|
|
@ -19,7 +19,7 @@ func assertExpectedModel(t *testing.T, p *resources.RegisteredModel) {
|
|||
func TestRegisteredModelDevelopment(t *testing.T) {
|
||||
b := loadTarget(t, "./registered_model", "development")
|
||||
assert.Len(t, b.Config.Resources.RegisteredModels, 1)
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.Equal(t, config.Development, b.Config.Bundle.Mode)
|
||||
|
||||
p := b.Config.Resources.RegisteredModels["my_registered_model"]
|
||||
assert.Equal(t, "my-dev-model", p.Name)
|
||||
|
|
|
@ -20,26 +20,26 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) {
|
|||
require.Len(t, diags, 3)
|
||||
require.NoError(t, diags.Error())
|
||||
|
||||
require.Equal(t, diags[0].Severity, diag.Warning)
|
||||
require.Equal(t, diags[0].Summary, "Pattern dist does not match any files")
|
||||
require.Equal(t, diag.Warning, diags[0].Severity)
|
||||
require.Equal(t, "Pattern dist does not match any files", diags[0].Summary)
|
||||
|
||||
require.Len(t, diags[0].Paths, 1)
|
||||
require.Equal(t, diags[0].Paths[0].String(), "sync.exclude[0]")
|
||||
require.Equal(t, "sync.exclude[0]", diags[0].Paths[0].String())
|
||||
|
||||
assert.Len(t, diags[0].Locations, 1)
|
||||
require.Equal(t, diags[0].Locations[0].File, filepath.Join("sync", "override", "databricks.yml"))
|
||||
require.Equal(t, diags[0].Locations[0].Line, 17)
|
||||
require.Equal(t, diags[0].Locations[0].Column, 11)
|
||||
require.Equal(t, 17, diags[0].Locations[0].Line)
|
||||
require.Equal(t, 11, diags[0].Locations[0].Column)
|
||||
|
||||
summaries := []string{
|
||||
fmt.Sprintf("Pattern %s does not match any files", filepath.Join("src", "*")),
|
||||
fmt.Sprintf("Pattern %s does not match any files", filepath.Join("tests", "*")),
|
||||
}
|
||||
|
||||
require.Equal(t, diags[1].Severity, diag.Warning)
|
||||
require.Equal(t, diag.Warning, diags[1].Severity)
|
||||
require.Contains(t, summaries, diags[1].Summary)
|
||||
|
||||
require.Equal(t, diags[2].Severity, diag.Warning)
|
||||
require.Equal(t, diag.Warning, diags[2].Severity)
|
||||
require.Contains(t, summaries, diags[2].Summary)
|
||||
}
|
||||
|
||||
|
@ -47,7 +47,7 @@ func TestSyncIncludeWithNegate(t *testing.T) {
|
|||
b := loadTarget(t, "./sync/negate", "default")
|
||||
|
||||
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns())
|
||||
require.Len(t, diags, 0)
|
||||
require.Empty(t, diags)
|
||||
require.NoError(t, diags.Error())
|
||||
}
|
||||
|
||||
|
@ -58,6 +58,6 @@ func TestSyncIncludeWithNegateNoMatches(t *testing.T) {
|
|||
require.Len(t, diags, 1)
|
||||
require.NoError(t, diags.Error())
|
||||
|
||||
require.Equal(t, diags[0].Severity, diag.Warning)
|
||||
require.Equal(t, diags[0].Summary, "Pattern !*.txt2 does not match any files")
|
||||
require.Equal(t, diag.Warning, diags[0].Severity)
|
||||
require.Equal(t, "Pattern !*.txt2 does not match any files", diags[0].Summary)
|
||||
}
|
||||
|
|
|
@ -115,12 +115,12 @@ func TestSyncPathsNoRoot(t *testing.T) {
|
|||
// If set to nil, it won't sync anything.
|
||||
b = loadTarget(t, "./sync/paths_no_root", "nil")
|
||||
assert.Equal(t, filepath.FromSlash("sync/paths_no_root"), b.SyncRootPath)
|
||||
assert.Len(t, b.Config.Sync.Paths, 0)
|
||||
assert.Empty(t, b.Config.Sync.Paths)
|
||||
|
||||
// If set to an empty sequence, it won't sync anything.
|
||||
b = loadTarget(t, "./sync/paths_no_root", "empty")
|
||||
assert.Equal(t, filepath.FromSlash("sync/paths_no_root"), b.SyncRootPath)
|
||||
assert.Len(t, b.Config.Sync.Paths, 0)
|
||||
assert.Empty(t, b.Config.Sync.Paths)
|
||||
}
|
||||
|
||||
func TestSyncSharedCode(t *testing.T) {
|
||||
|
|
|
@ -44,7 +44,7 @@ func TestDashboard_ErrorOnLegacyDashboard(t *testing.T) {
|
|||
|
||||
_, diags := d.resolveID(ctx, b)
|
||||
require.Len(t, diags, 1)
|
||||
assert.Equal(t, diags[0].Summary, "dashboard \"legacy dashboard\" is a legacy dashboard")
|
||||
assert.Equal(t, "dashboard \"legacy dashboard\" is a legacy dashboard", diags[0].Summary)
|
||||
}
|
||||
|
||||
func TestDashboard_ExistingID_Nominal(t *testing.T) {
|
||||
|
|
|
@ -3,7 +3,6 @@ package generate
|
|||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
|
@ -302,7 +301,7 @@ func TestGenerateJobCommandOldFileRename(t *testing.T) {
|
|||
|
||||
// Make sure file do not exists after the run
|
||||
_, err = os.Stat(oldFilename)
|
||||
require.True(t, errors.Is(err, fs.ErrNotExist))
|
||||
require.ErrorIs(t, err, fs.ErrNotExist)
|
||||
|
||||
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
|
||||
require.NoError(t, err)
|
||||
|
|
|
@ -148,9 +148,9 @@ func TestEnvVarsConfigureNoInteractive(t *testing.T) {
|
|||
|
||||
// We should only save host and token for a profile, other env variables should not be saved
|
||||
_, err = defaultSection.GetKey("auth_type")
|
||||
assert.NotNil(t, err)
|
||||
assert.Error(t, err)
|
||||
_, err = defaultSection.GetKey("metadata_service_url")
|
||||
assert.NotNil(t, err)
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
func TestEnvVarsConfigureNoArgsNoInteractive(t *testing.T) {
|
||||
|
|
|
@ -7,14 +7,15 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestFileFromRef(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/databrickslabs/ucx/main/README.md" {
|
||||
_, err := w.Write([]byte(`abc`))
|
||||
require.NoError(t, err)
|
||||
if !assert.NoError(t, err) {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
t.Logf("Requested: %s", r.URL.Path)
|
||||
|
@ -34,7 +35,9 @@ func TestDownloadZipball(t *testing.T) {
|
|||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/repos/databrickslabs/ucx/zipball/main" {
|
||||
_, err := w.Write([]byte(`abc`))
|
||||
require.NoError(t, err)
|
||||
if !assert.NoError(t, err) {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
t.Logf("Requested: %s", r.URL.Path)
|
||||
|
|
|
@ -7,14 +7,15 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestLoadsReleasesForCLI(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/repos/databricks/cli/releases" {
|
||||
_, err := w.Write([]byte(`[{"tag_name": "v1.2.3"}, {"tag_name": "v1.2.2"}]`))
|
||||
require.NoError(t, err)
|
||||
if !assert.NoError(t, err) {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
t.Logf("Requested: %s", r.URL.Path)
|
||||
|
|
|
@ -7,14 +7,13 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestRepositories(t *testing.T) {
|
||||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/users/databrickslabs/repos" {
|
||||
_, err := w.Write([]byte(`[{"name": "x"}]`))
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
return
|
||||
}
|
||||
t.Logf("Requested: %s", r.URL.Path)
|
||||
|
@ -28,5 +27,5 @@ func TestRepositories(t *testing.T) {
|
|||
r := NewRepositoryCache("databrickslabs", t.TempDir())
|
||||
all, err := r.Load(ctx)
|
||||
assert.NoError(t, err)
|
||||
assert.True(t, len(all) > 0)
|
||||
assert.NotEmpty(t, all)
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ func TestCreatesDirectoryIfNeeded(t *testing.T) {
|
|||
}
|
||||
first, err := c.Load(ctx, tick)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, first, int64(1))
|
||||
assert.Equal(t, int64(1), first)
|
||||
}
|
||||
|
||||
func TestImpossibleToCreateDir(t *testing.T) {
|
||||
|
|
|
@ -26,6 +26,7 @@ import (
|
|||
"github.com/databricks/databricks-sdk-go/service/compute"
|
||||
"github.com/databricks/databricks-sdk-go/service/iam"
|
||||
"github.com/databricks/databricks-sdk-go/service/sql"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
|
@ -169,17 +170,17 @@ func TestInstallerWorksForReleases(t *testing.T) {
|
|||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/databrickslabs/blueprint/v0.3.15/labs.yml" {
|
||||
raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml")
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
_, err = w.Write(raw)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/repos/databrickslabs/blueprint/zipball/v0.3.15" {
|
||||
raw, err := zipballFromFolder("testdata/installed-in-home/.databricks/labs/blueprint/lib")
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
w.Header().Add("Content-Type", "application/octet-stream")
|
||||
_, err = w.Write(raw)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/api/2.1/clusters/get" {
|
||||
|
@ -376,17 +377,17 @@ func TestUpgraderWorksForReleases(t *testing.T) {
|
|||
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/databrickslabs/blueprint/v0.4.0/labs.yml" {
|
||||
raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml")
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
_, err = w.Write(raw)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/repos/databrickslabs/blueprint/zipball/v0.4.0" {
|
||||
raw, err := zipballFromFolder("testdata/installed-in-home/.databricks/labs/blueprint/lib")
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
w.Header().Add("Content-Type", "application/octet-stream")
|
||||
_, err = w.Write(raw)
|
||||
require.NoError(t, err)
|
||||
assert.NoError(t, err)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/api/2.1/clusters/get" {
|
||||
|
|
|
@ -85,13 +85,13 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) {
|
|||
|
||||
// The remote path attribute on the artifact file should have been set.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
|
||||
path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`),
|
||||
b.Config.Artifacts["test"].Files[0].RemotePath,
|
||||
)
|
||||
|
||||
// The task library path should have been updated to the remote path.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
|
||||
path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`),
|
||||
b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0].Whl,
|
||||
)
|
||||
}
|
||||
|
@ -149,13 +149,13 @@ func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
|
|||
|
||||
// The remote path attribute on the artifact file should have been set.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
|
||||
path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`),
|
||||
b.Config.Artifacts["test"].Files[0].RemotePath,
|
||||
)
|
||||
|
||||
// The job environment deps path should have been updated to the remote path.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
|
||||
path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`),
|
||||
b.Config.Resources.Jobs["test"].JobSettings.Environments[0].Spec.Dependencies[0],
|
||||
)
|
||||
}
|
||||
|
@ -218,13 +218,13 @@ func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
|
|||
|
||||
// The remote path attribute on the artifact file should have been set.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join(regexp.QuoteMeta(volumePath), `.internal/test\.whl`)),
|
||||
path.Join(regexp.QuoteMeta(volumePath), `.internal/test\.whl`),
|
||||
b.Config.Artifacts["test"].Files[0].RemotePath,
|
||||
)
|
||||
|
||||
// The task library path should have been updated to the remote path.
|
||||
require.Regexp(t,
|
||||
regexp.MustCompile(path.Join(regexp.QuoteMeta(volumePath), `.internal/test\.whl`)),
|
||||
path.Join(regexp.QuoteMeta(volumePath), `.internal/test\.whl`),
|
||||
b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0].Whl,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ package bundle_test
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
@ -99,7 +98,7 @@ func TestBundleDeployUcSchema(t *testing.T) {
|
|||
// Assert the schema is deleted
|
||||
_, err = w.Schemas.GetByFullName(ctx, strings.Join([]string{catalogName, schemaName}, "."))
|
||||
apiErr := &apierr.APIError{}
|
||||
assert.True(t, errors.As(err, &apiErr))
|
||||
assert.ErrorAs(t, err, &apiErr)
|
||||
assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
package bundle_test
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
@ -71,11 +70,11 @@ func TestBundleDestroy(t *testing.T) {
|
|||
// Assert snapshot file is deleted
|
||||
entries, err = os.ReadDir(snapshotsDir)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, entries, 0)
|
||||
assert.Empty(t, entries)
|
||||
|
||||
// Assert bundle deployment path is deleted
|
||||
_, err = w.Workspace.GetStatusByPath(ctx, remoteRoot)
|
||||
apiErr := &apierr.APIError{}
|
||||
assert.True(t, errors.As(err, &apiErr))
|
||||
assert.ErrorAs(t, err, &apiErr)
|
||||
assert.Equal(t, "RESOURCE_DOES_NOT_EXIST", apiErr.ErrorCode)
|
||||
}
|
||||
|
|
|
@ -5,7 +5,6 @@ import (
"encoding/json"
"io/fs"
"path"
"regexp"
"strings"
"testing"

@ -65,11 +64,11 @@ func TestFsLs(t *testing.T) {

assert.Equal(t, "a", parsedStdout[0]["name"])
assert.Equal(t, true, parsedStdout[0]["is_directory"])
assert.Equal(t, float64(0), parsedStdout[0]["size"])
assert.InDelta(t, float64(0), parsedStdout[0]["size"], 0.0001)

assert.Equal(t, "bye.txt", parsedStdout[1]["name"])
assert.Equal(t, false, parsedStdout[1]["is_directory"])
assert.Equal(t, float64(3), parsedStdout[1]["size"])
assert.InDelta(t, float64(3), parsedStdout[1]["size"], 0.0001)
})
}
}

@ -99,11 +98,11 @@ func TestFsLsWithAbsolutePaths(t *testing.T) {

assert.Equal(t, path.Join(tmpDir, "a"), parsedStdout[0]["name"])
assert.Equal(t, true, parsedStdout[0]["is_directory"])
assert.Equal(t, float64(0), parsedStdout[0]["size"])
assert.InDelta(t, float64(0), parsedStdout[0]["size"], 0.0001)

assert.Equal(t, path.Join(tmpDir, "bye.txt"), parsedStdout[1]["name"])
assert.Equal(t, false, parsedStdout[1]["is_directory"])
assert.Equal(t, float64(3), parsedStdout[1]["size"])
assert.InDelta(t, float64(3), parsedStdout[1]["size"].(float64), 0.0001)
})
}
}

@ -122,7 +121,7 @@ func TestFsLsOnFile(t *testing.T) {
setupLsFiles(t, f)

_, _, err := testcli.RequireErrorRun(t, ctx, "fs", "ls", path.Join(tmpDir, "a", "hello.txt"), "--output=json")
assert.Regexp(t, regexp.MustCompile("not a directory: .*/a/hello.txt"), err.Error())
assert.Regexp(t, "not a directory: .*/a/hello.txt", err.Error())
assert.ErrorAs(t, err, &filer.NotADirectory{})
})
}

@ -147,7 +146,7 @@ func TestFsLsOnEmptyDir(t *testing.T) {
require.NoError(t, err)

// assert on ls output
assert.Equal(t, 0, len(parsedStdout))
assert.Empty(t, parsedStdout)
})
}
}

@ -166,7 +165,7 @@ func TestFsLsForNonexistingDir(t *testing.T) {

_, _, err := testcli.RequireErrorRun(t, ctx, "fs", "ls", path.Join(tmpDir, "nonexistent"), "--output=json")
assert.ErrorIs(t, err, fs.ErrNotExist)
assert.Regexp(t, regexp.MustCompile("no such directory: .*/nonexistent"), err.Error())
assert.Regexp(t, "no such directory: .*/nonexistent", err.Error())
})
}
}
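The size assertions above fall under the float-compare rule: exact equality on floating point values is fragile, so the autofix switches to assert.InDelta with a tolerance. A small sketch of the same idea on decoded JSON (the payload is made up for illustration):

package example

import (
    "encoding/json"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestFloatCompare(t *testing.T) {
    var parsed map[string]any
    require.NoError(t, json.Unmarshal([]byte(`{"size": 3}`), &parsed))

    // Before: exact equality on a float64 decoded from JSON.
    assert.Equal(t, float64(3), parsed["size"])

    // After: compare within a small delta instead.
    assert.InDelta(t, float64(3), parsed["size"], 0.0001)
}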
@ -34,7 +34,7 @@ func TestFsMkdir(t *testing.T) {
info, err := f.Stat(context.Background(), "a")
require.NoError(t, err)
assert.Equal(t, "a", info.Name())
assert.Equal(t, true, info.IsDir())
assert.True(t, info.IsDir())
})
}
}

@ -60,19 +60,19 @@ func TestFsMkdirCreatesIntermediateDirectories(t *testing.T) {
infoA, err := f.Stat(context.Background(), "a")
require.NoError(t, err)
assert.Equal(t, "a", infoA.Name())
assert.Equal(t, true, infoA.IsDir())
assert.True(t, infoA.IsDir())

// assert directory "b" is created
infoB, err := f.Stat(context.Background(), "a/b")
require.NoError(t, err)
assert.Equal(t, "b", infoB.Name())
assert.Equal(t, true, infoB.IsDir())
assert.True(t, infoB.IsDir())

// assert directory "c" is created
infoC, err := f.Stat(context.Background(), "a/b/c")
require.NoError(t, err)
assert.Equal(t, "c", infoC.Name())
assert.Equal(t, true, infoC.IsDir())
assert.True(t, infoC.IsDir())
})
}
}
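These hunks apply the bool-compare rule: comparing against a literal true/false with assert.Equal is replaced by assert.True/assert.False, which state the intent directly. A tiny sketch:

package example

import (
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestBoolCompare(t *testing.T) {
    info, err := os.Stat(t.TempDir())
    require.NoError(t, err)

    // Before: equality against a boolean literal.
    assert.Equal(t, true, info.IsDir())

    // After: the dedicated helper reads better and fails with a clearer message.
    assert.True(t, info.IsDir())
}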
@ -225,7 +225,7 @@ func (a *syncTest) snapshotContains(files []string) {
assert.Equal(a.t, s.RemotePath, a.remoteRoot)
for _, filePath := range files {
_, ok := s.LastModifiedTimes[filePath]
assert.True(a.t, ok, fmt.Sprintf("%s not in snapshot file: %v", filePath, s.LastModifiedTimes))
assert.True(a.t, ok, "%s not in snapshot file: %v", filePath, s.LastModifiedTimes)
}
assert.Equal(a.t, len(files), len(s.LastModifiedTimes))
}
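The hunk above uses testify's variadic message arguments instead of a pre-built fmt.Sprintf: the assertion helpers treat a trailing format string plus arguments as a lazy message, formatted only when the assertion fails. A minimal sketch with made-up data:

package example

import (
    "fmt"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestLazyFailureMessages(t *testing.T) {
    snapshot := map[string]string{"hello.txt": "2024-01-01"}
    _, ok := snapshot["hello.txt"]

    // Before: the message is built eagerly, even when the assertion passes.
    assert.True(t, ok, fmt.Sprintf("%s not in snapshot: %v", "hello.txt", snapshot))

    // After: testify formats the message only if the assertion fails.
    assert.True(t, ok, "%s not in snapshot: %v", "hello.txt", snapshot)
}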
@ -4,11 +4,9 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"io"
"io/fs"
"path"
"regexp"
"strings"
"testing"

@ -106,7 +104,7 @@ func commonFilerRecursiveDeleteTest(t *testing.T, ctx context.Context, f filer.F
for _, e := range entriesBeforeDelete {
names = append(names, e.Name())
}
assert.Equal(t, names, []string{"file1", "file2", "subdir1", "subdir2"})
assert.Equal(t, []string{"file1", "file2", "subdir1", "subdir2"}, names)

err = f.Delete(ctx, "dir")
assert.ErrorAs(t, err, &filer.DirectoryNotEmptyError{})
@ -149,13 +147,13 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)

// Write should fail because the intermediate directory doesn't exist.
err = f.Write(ctx, "/foo/bar", strings.NewReader(`hello world`))
assert.True(t, errors.As(err, &filer.NoSuchDirectoryError{}))
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorAs(t, err, &filer.NoSuchDirectoryError{})
assert.ErrorIs(t, err, fs.ErrNotExist)

// Read should fail because the intermediate directory doesn't yet exist.
_, err = f.Read(ctx, "/foo/bar")
assert.True(t, errors.As(err, &filer.FileDoesNotExistError{}))
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorAs(t, err, &filer.FileDoesNotExistError{})
assert.ErrorIs(t, err, fs.ErrNotExist)

// Read should fail because the path points to a directory
err = f.Mkdir(ctx, "/dir")

@ -170,8 +168,8 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)

// Write should fail because there is an existing file at the specified path.
err = f.Write(ctx, "/foo/bar", strings.NewReader(`hello universe`))
assert.True(t, errors.As(err, &filer.FileAlreadyExistsError{}))
assert.True(t, errors.Is(err, fs.ErrExist))
assert.ErrorAs(t, err, &filer.FileAlreadyExistsError{})
assert.ErrorIs(t, err, fs.ErrExist)

// Write with OverwriteIfExists should succeed.
err = f.Write(ctx, "/foo/bar", strings.NewReader(`hello universe`), filer.OverwriteIfExists)

@ -188,7 +186,7 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
require.NoError(t, err)
assert.Equal(t, "foo", info.Name())
assert.True(t, info.Mode().IsDir())
assert.Equal(t, true, info.IsDir())
assert.True(t, info.IsDir())

// Stat on a file should succeed.
// Note: size and modification time behave differently between backends.
@ -196,17 +194,17 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
require.NoError(t, err)
assert.Equal(t, "bar", info.Name())
assert.True(t, info.Mode().IsRegular())
assert.Equal(t, false, info.IsDir())
assert.False(t, info.IsDir())

// Delete should fail if the file doesn't exist.
err = f.Delete(ctx, "/doesnt_exist")
assert.ErrorAs(t, err, &filer.FileDoesNotExistError{})
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// Stat should fail if the file doesn't exist.
_, err = f.Stat(ctx, "/doesnt_exist")
assert.ErrorAs(t, err, &filer.FileDoesNotExistError{})
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// Delete should succeed for file that does exist.
err = f.Delete(ctx, "/foo/bar")

@ -215,7 +213,7 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
// Delete should fail for a non-empty directory.
err = f.Delete(ctx, "/foo")
assert.ErrorAs(t, err, &filer.DirectoryNotEmptyError{})
assert.True(t, errors.Is(err, fs.ErrInvalid))
assert.ErrorIs(t, err, fs.ErrInvalid)

// Delete should succeed for a non-empty directory if the DeleteRecursively flag is set.
err = f.Delete(ctx, "/foo", filer.DeleteRecursively)

@ -224,8 +222,8 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer)
// Delete of the filer root should ALWAYS fail, otherwise subsequent writes would fail.
// It is not in the filer's purview to delete its root directory.
err = f.Delete(ctx, "/")
assert.True(t, errors.As(err, &filer.CannotDeleteRootError{}))
assert.True(t, errors.Is(err, fs.ErrInvalid))
assert.ErrorAs(t, err, &filer.CannotDeleteRootError{})
assert.ErrorIs(t, err, fs.ErrInvalid)
}

func TestFilerReadWrite(t *testing.T) {
@ -262,7 +260,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
// We start with an empty directory.
entries, err := f.ReadDir(ctx, ".")
require.NoError(t, err)
assert.Len(t, entries, 0)
assert.Empty(t, entries)

// Write a file.
err = f.Write(ctx, "/hello.txt", strings.NewReader(`hello world`))

@ -282,8 +280,8 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {

// Expect an error if the path doesn't exist.
_, err = f.ReadDir(ctx, "/dir/a/b/c/d/e")
assert.True(t, errors.As(err, &filer.NoSuchDirectoryError{}), err)
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorAs(t, err, &filer.NoSuchDirectoryError{}, err)
assert.ErrorIs(t, err, fs.ErrNotExist)

// Expect two entries in the root.
entries, err = f.ReadDir(ctx, ".")

@ -295,7 +293,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
assert.False(t, entries[1].IsDir())
info, err = entries[1].Info()
require.NoError(t, err)
assert.Greater(t, info.ModTime().Unix(), int64(0))
assert.Positive(t, info.ModTime().Unix())

// Expect two entries in the directory.
entries, err = f.ReadDir(ctx, "/dir")

@ -307,7 +305,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
assert.False(t, entries[1].IsDir())
info, err = entries[1].Info()
require.NoError(t, err)
assert.Greater(t, info.ModTime().Unix(), int64(0))
assert.Positive(t, info.ModTime().Unix())

// Expect a single entry in the nested path.
entries, err = f.ReadDir(ctx, "/dir/a/b")

@ -325,7 +323,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
require.NoError(t, err)
entries, err = f.ReadDir(ctx, "empty-dir")
assert.NoError(t, err)
assert.Len(t, entries, 0)
assert.Empty(t, entries)

// Expect one entry for a directory with a file in it
err = f.Write(ctx, "dir-with-one-file/my-file.txt", strings.NewReader("abc"), filer.CreateParentDirectories)

@ -333,7 +331,7 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) {
entries, err = f.ReadDir(ctx, "dir-with-one-file")
assert.NoError(t, err)
assert.Len(t, entries, 1)
assert.Equal(t, entries[0].Name(), "my-file.txt")
assert.Equal(t, "my-file.txt", entries[0].Name())
assert.False(t, entries[0].IsDir())
}
@ -459,7 +457,7 @@ func TestFilerWorkspaceNotebook(t *testing.T) {
// Assert uploading a second time fails due to overwrite mode missing
err = f.Write(ctx, tc.name, strings.NewReader(tc.content2))
require.ErrorIs(t, err, fs.ErrExist)
assert.Regexp(t, regexp.MustCompile(`file already exists: .*/`+tc.nameWithoutExt+`$`), err.Error())
assert.Regexp(t, `file already exists: .*/`+tc.nameWithoutExt+`$`, err.Error())

// Try uploading the notebook again with overwrite flag. This time it should succeed.
err = f.Write(ctx, tc.name, strings.NewReader(tc.content2), filer.OverwriteIfExists)
@ -42,7 +42,7 @@ func TestStoreAndLookup(t *testing.T) {
tok, err := l.Lookup("x")
require.NoError(t, err)
assert.Equal(t, "abc", tok.AccessToken)
assert.Equal(t, 2, len(l.Tokens))
assert.Len(t, l.Tokens, 2)

_, err = l.Lookup("z")
assert.Equal(t, ErrNotConfigured, err)
@ -216,7 +216,7 @@ func TestSaveToProfile_ClearingPreviousProfile(t *testing.T) {

dlft, err := file.GetSection("DEFAULT")
assert.NoError(t, err)
assert.Len(t, dlft.KeysHash(), 0)
assert.Empty(t, dlft.KeysHash())

abc, err := file.GetSection("abc")
assert.NoError(t, err)
@ -11,10 +11,10 @@ import (
)

func TestProfileCloud(t *testing.T) {
assert.Equal(t, Profile{Host: "https://dbc-XXXXXXXX-YYYY.cloud.databricks.com"}.Cloud(), "AWS")
assert.Equal(t, Profile{Host: "https://adb-xxx.y.azuredatabricks.net/"}.Cloud(), "Azure")
assert.Equal(t, Profile{Host: "https://workspace.gcp.databricks.com/"}.Cloud(), "GCP")
assert.Equal(t, Profile{Host: "https://some.invalid.host.com/"}.Cloud(), "AWS")
assert.Equal(t, "AWS", Profile{Host: "https://dbc-XXXXXXXX-YYYY.cloud.databricks.com"}.Cloud())
assert.Equal(t, "Azure", Profile{Host: "https://adb-xxx.y.azuredatabricks.net/"}.Cloud())
assert.Equal(t, "GCP", Profile{Host: "https://workspace.gcp.databricks.com/"}.Cloud())
assert.Equal(t, "AWS", Profile{Host: "https://some.invalid.host.com/"}.Cloud())
}

func TestProfilesSearchCaseInsensitive(t *testing.T) {
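This hunk is the expected-actual rule: assert.Equal takes the expected value first and the computed value second, otherwise failure output labels the two sides backwards. A small sketch with a stand-in function (cloudOf here is hypothetical, not the real Profile.Cloud):

package example

import (
    "strings"
    "testing"

    "github.com/stretchr/testify/assert"
)

// cloudOf is a stand-in for Profile.Cloud(), for illustration only.
func cloudOf(host string) string {
    if strings.Contains(host, "azuredatabricks.net") {
        return "Azure"
    }
    return "AWS"
}

func TestExpectedComesFirst(t *testing.T) {
    // Before: the literal sits in the "actual" slot, so failures read backwards.
    assert.Equal(t, cloudOf("https://adb-xxx.y.azuredatabricks.net/"), "Azure")

    // After: expected value first, computed value second.
    assert.Equal(t, "Azure", cloudOf("https://adb-xxx.y.azuredatabricks.net/"))
}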
@ -1,7 +1,6 @@
package errs

import (
"errors"
"fmt"
"testing"

@ -14,13 +13,13 @@ func TestFromManyErrors(t *testing.T) {
e3 := fmt.Errorf("Error 3")
err := FromMany(e1, e2, e3)

assert.True(t, errors.Is(err, e1))
assert.True(t, errors.Is(err, e2))
assert.True(t, errors.Is(err, e3))
assert.ErrorIs(t, err, e1)
assert.ErrorIs(t, err, e2)
assert.ErrorIs(t, err, e3)

assert.Equal(t, err.Error(), `Error 1
assert.Equal(t, `Error 1
Error 2
Error 3`)
Error 3`, err.Error())
}

func TestFromManyErrorsWihtNil(t *testing.T) {

@ -29,9 +28,9 @@ func TestFromManyErrorsWihtNil(t *testing.T) {
e3 := fmt.Errorf("Error 3")
err := FromMany(e1, e2, e3)

assert.True(t, errors.Is(err, e1))
assert.True(t, errors.Is(err, e3))
assert.ErrorIs(t, err, e1)
assert.ErrorIs(t, err, e3)

assert.Equal(t, err.Error(), `Error 1
Error 3`)
assert.Equal(t, `Error 1
Error 3`, err.Error())
}
@ -37,7 +37,7 @@ func TestFilerCompleterSetsPrefix(t *testing.T) {

assert.Equal(t, []string{"dbfs:/dir/dirA/", "dbfs:/dir/dirB/"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterReturnsNestedDirs(t *testing.T) {

@ -46,7 +46,7 @@ func TestFilerCompleterReturnsNestedDirs(t *testing.T) {

assert.Equal(t, []string{"dir/dirA/", "dir/dirB/"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterReturnsAdjacentDirs(t *testing.T) {

@ -55,7 +55,7 @@ func TestFilerCompleterReturnsAdjacentDirs(t *testing.T) {

assert.Equal(t, []string{"dir/dirA/", "dir/dirB/"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterReturnsNestedDirsAndFiles(t *testing.T) {

@ -64,7 +64,7 @@ func TestFilerCompleterReturnsNestedDirsAndFiles(t *testing.T) {

assert.Equal(t, []string{"dir/dirA/", "dir/dirB/", "dir/fileA"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterAddsDbfsPath(t *testing.T) {

@ -78,7 +78,7 @@ func TestFilerCompleterAddsDbfsPath(t *testing.T) {

assert.Equal(t, []string{"dir/dirA/", "dir/dirB/", "dbfs:/"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterWindowsSeparator(t *testing.T) {

@ -92,7 +92,7 @@ func TestFilerCompleterWindowsSeparator(t *testing.T) {

assert.Equal(t, []string{"dir\\dirA\\", "dir\\dirB\\", "dbfs:/"}, completions)
assert.Equal(t, cobra.ShellCompDirectiveNoSpace, directive)
assert.Nil(t, err)
assert.NoError(t, err)
}

func TestFilerCompleterNoCompletions(t *testing.T) {
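The repeated change above is the nil-compare rule applied to the success path: assert.Nil on an error value becomes assert.NoError, which prints the error text if one unexpectedly shows up. A minimal sketch:

package example

import (
    "strconv"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestNoErrorInsteadOfNil(t *testing.T) {
    n, err := strconv.Atoi("42")

    // Before: only checks that the value is nil.
    assert.Nil(t, err)

    // After: states that no error was expected and reports it if one occurs.
    assert.NoError(t, err)
    assert.Equal(t, 42, n)
}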
@ -63,7 +63,7 @@ func TestFsOpenFile(t *testing.T) {
assert.Equal(t, "fileA", info.Name())
assert.Equal(t, int64(3), info.Size())
assert.Equal(t, fs.FileMode(0), info.Mode())
assert.Equal(t, false, info.IsDir())
assert.False(t, info.IsDir())

// Read until closed.
b := make([]byte, 3)

@ -91,7 +91,7 @@ func TestFsOpenDir(t *testing.T) {
info, err := fakeFile.Stat()
require.NoError(t, err)
assert.Equal(t, "root", info.Name())
assert.Equal(t, true, info.IsDir())
assert.True(t, info.IsDir())

de, ok := fakeFile.(fs.ReadDirFile)
require.True(t, ok)
@ -52,7 +52,7 @@ func TestGlobFileset(t *testing.T) {

files, err = g.Files()
require.NoError(t, err)
require.Equal(t, len(files), 0)
require.Empty(t, files)
}

func TestGlobFilesetWithRelativeRoot(t *testing.T) {
@ -123,12 +123,12 @@ func TestJsonUnmarshalForRequest(t *testing.T) {
assert.Equal(t, "new job", r.NewSettings.Name)
assert.Equal(t, 0, r.NewSettings.TimeoutSeconds)
assert.Equal(t, 1, r.NewSettings.MaxConcurrentRuns)
assert.Equal(t, 1, len(r.NewSettings.Tasks))
assert.Len(t, r.NewSettings.Tasks, 1)
assert.Equal(t, "new task", r.NewSettings.Tasks[0].TaskKey)
assert.Equal(t, 0, r.NewSettings.Tasks[0].TimeoutSeconds)
assert.Equal(t, 0, r.NewSettings.Tasks[0].MaxRetries)
assert.Equal(t, 0, r.NewSettings.Tasks[0].MinRetryIntervalMillis)
assert.Equal(t, true, r.NewSettings.Tasks[0].RetryOnTimeout)
assert.True(t, r.NewSettings.Tasks[0].RetryOnTimeout)
}

const incorrectJsonData = `{

@ -280,8 +280,8 @@ func TestJsonUnmarshalForRequestWithForceSendFields(t *testing.T) {
assert.NoError(t, diags.Error())
assert.Empty(t, diags)

assert.Equal(t, false, r.NewSettings.NotificationSettings.NoAlertForSkippedRuns)
assert.Equal(t, false, r.NewSettings.NotificationSettings.NoAlertForCanceledRuns)
assert.False(t, r.NewSettings.NotificationSettings.NoAlertForSkippedRuns)
assert.False(t, r.NewSettings.NotificationSettings.NoAlertForCanceledRuns)
assert.NotContains(t, r.NewSettings.NotificationSettings.ForceSendFields, "NoAlertForSkippedRuns")
assert.Contains(t, r.NewSettings.NotificationSettings.ForceSendFields, "NoAlertForCanceledRuns")
}
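Here the len rule replaces assert.Equal(t, n, len(x)) with assert.Len(t, x, n), so a failure shows the collection's contents rather than just two numbers. A tiny sketch:

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestLenHelper(t *testing.T) {
    tasks := []string{"new task"}

    // Before: a failure only prints the two lengths.
    assert.Equal(t, 1, len(tasks))

    // After: a failure also prints the slice elements.
    assert.Len(t, tasks, 1)
}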
@ -33,6 +33,6 @@ func TestFindDirWithLeaf(t *testing.T) {
{
out, err := FindDirWithLeaf(root, "this-leaf-doesnt-exist-anywhere")
assert.ErrorIs(t, err, os.ErrNotExist)
assert.Equal(t, out, "")
assert.Equal(t, "", out)
}
}
@ -96,7 +96,7 @@ func TestTemplateFromString(t *testing.T) {
v, err = fromString("1.1", NumberType)
assert.NoError(t, err)
// Floating point conversions are not perfect
assert.True(t, (v.(float64)-1.1) < 0.000001)
assert.Less(t, (v.(float64) - 1.1), 0.000001)

v, err = fromString("12345", IntegerType)
assert.NoError(t, err)

@ -104,7 +104,7 @@ func TestTemplateFromString(t *testing.T) {

v, err = fromString("123", NumberType)
assert.NoError(t, err)
assert.Equal(t, float64(123), v)
assert.InDelta(t, float64(123), v.(float64), 0.0001)

_, err = fromString("qrt", ArrayType)
assert.EqualError(t, err, "cannot parse string as object of type array. Value of string: \"qrt\"")
@ -1,7 +1,6 @@
package notebook

import (
"errors"
"io/fs"
"os"
"path/filepath"

@ -53,7 +52,7 @@ func TestDetectCallsDetectJupyter(t *testing.T) {

func TestDetectUnknownExtension(t *testing.T) {
_, _, err := Detect("./testdata/doesntexist.foobar")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

nb, _, err := Detect("./testdata/unknown_extension.foobar")
require.NoError(t, err)

@ -62,7 +61,7 @@ func TestDetectUnknownExtension(t *testing.T) {

func TestDetectNoExtension(t *testing.T) {
_, _, err := Detect("./testdata/doesntexist")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

nb, _, err := Detect("./testdata/no_extension")
require.NoError(t, err)
@ -95,7 +95,7 @@ func TestBackgroundNoStdin(t *testing.T) {
func TestBackgroundFails(t *testing.T) {
ctx := context.Background()
_, err := Background(ctx, []string{"ls", "/dev/null/x"})
assert.NotNil(t, err)
assert.Error(t, err)
}

func TestBackgroundFailsOnOption(t *testing.T) {
@ -27,7 +27,7 @@ func TestForwardedFails(t *testing.T) {
err := Forwarded(ctx, []string{
"_non_existent_",
}, strings.NewReader("abc\n"), &buf, &buf)
assert.NotNil(t, err)
assert.Error(t, err)
}

func TestForwardedFailsOnStdinPipe(t *testing.T) {

@ -39,5 +39,5 @@ func TestForwardedFailsOnStdinPipe(t *testing.T) {
c.Stdin = strings.NewReader("x")
return nil
})
assert.NotNil(t, err)
assert.Error(t, err)
}
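These hunks apply the error rule on the failure path: assert.NotNil on an error becomes assert.Error, which documents that an error is expected. A minimal sketch:

package example

import (
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestErrorInsteadOfNotNil(t *testing.T) {
    _, err := os.Open("/this/path/does/not/exist")

    // Before: only checks for a non-nil value.
    assert.NotNil(t, err)

    // After: states that an error is expected here.
    assert.Error(t, err)
}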
@ -41,7 +41,7 @@ func TestWorksWithLibsEnv(t *testing.T) {
vars := cmd.Environ()
sort.Strings(vars)

assert.True(t, len(vars) >= 2)
assert.GreaterOrEqual(t, len(vars), 2)
assert.Equal(t, "CCC=DDD", vars[0])
assert.Equal(t, "EEE=FFF", vars[1])
}
@ -18,7 +18,7 @@ func TestAtLeastOnePythonInstalled(t *testing.T) {
assert.NoError(t, err)
a := all.Latest()
t.Logf("latest is: %s", a)
assert.True(t, len(all) > 0)
assert.NotEmpty(t, all)
}

func TestNoInterpretersFound(t *testing.T) {
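The last two hunks swap raw boolean comparisons for the dedicated comparison helpers (GreaterOrEqual, NotEmpty), whose failure messages include both operands. A short sketch with made-up values:

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestComparisonHelpers(t *testing.T) {
    vars := []string{"CCC=DDD", "EEE=FFF"}

    // Before: raw comparisons wrapped in assert.True.
    assert.True(t, len(vars) >= 2)
    assert.True(t, len(vars) > 0)

    // After: helpers that report the operands on failure.
    assert.GreaterOrEqual(t, len(vars), 2)
    assert.NotEmpty(t, vars)
    assert.Positive(t, len(vars))
}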
@ -51,7 +51,7 @@ func TestDiff(t *testing.T) {
assert.NoError(t, err)
change, err := state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 0)
assert.Empty(t, change.delete)
assert.Len(t, change.put, 2)
assert.Contains(t, change.put, "hello.txt")
assert.Contains(t, change.put, "world.txt")

@ -67,7 +67,7 @@ func TestDiff(t *testing.T) {
change, err = state.diff(ctx, files)
assert.NoError(t, err)

assert.Len(t, change.delete, 0)
assert.Empty(t, change.delete)
assert.Len(t, change.put, 1)
assert.Contains(t, change.put, "world.txt")
assertKeysOfMap(t, state.LastModifiedTimes, []string{"hello.txt", "world.txt"})

@ -82,7 +82,7 @@ func TestDiff(t *testing.T) {
change, err = state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 1)
assert.Len(t, change.put, 0)
assert.Empty(t, change.put)
assert.Contains(t, change.delete, "hello.txt")
assertKeysOfMap(t, state.LastModifiedTimes, []string{"world.txt"})
assert.Equal(t, map[string]string{"world.txt": "world.txt"}, state.LocalToRemoteNames)
@ -145,8 +145,8 @@ func TestFolderDiff(t *testing.T) {
assert.NoError(t, err)
change, err := state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 0)
assert.Len(t, change.rmdir, 0)
assert.Empty(t, change.delete)
assert.Empty(t, change.rmdir)
assert.Len(t, change.mkdir, 1)
assert.Len(t, change.put, 1)
assert.Contains(t, change.mkdir, "foo")

@ -159,8 +159,8 @@ func TestFolderDiff(t *testing.T) {
assert.NoError(t, err)
assert.Len(t, change.delete, 1)
assert.Len(t, change.rmdir, 1)
assert.Len(t, change.mkdir, 0)
assert.Len(t, change.put, 0)
assert.Empty(t, change.mkdir)
assert.Empty(t, change.put)
assert.Contains(t, change.delete, "foo/bar")
assert.Contains(t, change.rmdir, "foo")
}
@ -189,7 +189,7 @@ func TestPythonNotebookDiff(t *testing.T) {
foo.Overwrite(t, "# Databricks notebook source\nprint(\"abc\")")
change, err := state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 0)
assert.Empty(t, change.delete)
assert.Len(t, change.put, 1)
assert.Contains(t, change.put, "foo.py")
assertKeysOfMap(t, state.LastModifiedTimes, []string{"foo.py"})

@ -233,9 +233,9 @@ func TestPythonNotebookDiff(t *testing.T) {
change, err = state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 1)
assert.Len(t, change.put, 0)
assert.Empty(t, change.put)
assert.Contains(t, change.delete, "foo")
assert.Len(t, state.LastModifiedTimes, 0)
assert.Empty(t, state.LastModifiedTimes)
assert.Equal(t, map[string]string{}, state.LocalToRemoteNames)
assert.Equal(t, map[string]string{}, state.RemoteToLocalNames)
}
@ -264,7 +264,7 @@ func TestErrorWhenIdenticalRemoteName(t *testing.T) {
assert.NoError(t, err)
change, err := state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 0)
assert.Empty(t, change.delete)
assert.Len(t, change.put, 2)
assert.Contains(t, change.put, "foo.py")
assert.Contains(t, change.put, "foo")

@ -300,7 +300,7 @@ func TestNoErrorRenameWithIdenticalRemoteName(t *testing.T) {
assert.NoError(t, err)
change, err := state.diff(ctx, files)
assert.NoError(t, err)
assert.Len(t, change.delete, 0)
assert.Empty(t, change.delete)
assert.Len(t, change.put, 1)
assert.Contains(t, change.put, "foo.py")
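Throughout these sync tests the empty rule turns zero-length checks into assert.Empty, which also covers nil slices and maps. A minimal sketch:

package example

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestEmptyHelper(t *testing.T) {
    var deleted []string

    // Before: two equivalent ways of asserting zero length.
    assert.Len(t, deleted, 0)
    assert.Equal(t, 0, len(deleted))

    // After: one helper that handles nil and empty collections alike.
    assert.Empty(t, deleted)
}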
@ -59,7 +59,7 @@ func TestGetFileSet(t *testing.T) {

fileList, err := s.GetFileList(ctx)
require.NoError(t, err)
require.Equal(t, len(fileList), 10)
require.Len(t, fileList, 10)

inc, err = fileset.NewGlobSet(root, []string{})
require.NoError(t, err)

@ -77,7 +77,7 @@ func TestGetFileSet(t *testing.T) {

fileList, err = s.GetFileList(ctx)
require.NoError(t, err)
require.Equal(t, len(fileList), 2)
require.Len(t, fileList, 2)

inc, err = fileset.NewGlobSet(root, []string{"./.databricks/*.go"})
require.NoError(t, err)

@ -95,7 +95,7 @@ func TestGetFileSet(t *testing.T) {

fileList, err = s.GetFileList(ctx)
require.NoError(t, err)
require.Equal(t, len(fileList), 11)
require.Len(t, fileList, 11)
}

func TestRecursiveExclude(t *testing.T) {

@ -125,7 +125,7 @@ func TestRecursiveExclude(t *testing.T) {

fileList, err := s.GetFileList(ctx)
require.NoError(t, err)
require.Equal(t, len(fileList), 7)
require.Len(t, fileList, 7)
}

func TestNegateExclude(t *testing.T) {

@ -155,6 +155,6 @@ func TestNegateExclude(t *testing.T) {

fileList, err := s.GetFileList(ctx)
require.NoError(t, err)
require.Equal(t, len(fileList), 1)
require.Equal(t, fileList[0].Relative, "test/sub1/sub2/h.txt")
require.Len(t, fileList, 1)
require.Equal(t, "test/sub1/sub2/h.txt", fileList[0].Relative)
}
@ -24,7 +24,7 @@ func TestTemplateConfigAssignValuesFromFile(t *testing.T) {
err = c.assignValuesFromFile(filepath.Join(testDir, "config.json"))
if assert.NoError(t, err) {
assert.Equal(t, int64(1), c.values["int_val"])
assert.Equal(t, float64(2), c.values["float_val"])
assert.InDelta(t, float64(2), c.values["float_val"].(float64), 0.0001)
assert.Equal(t, true, c.values["bool_val"])
assert.Equal(t, "hello", c.values["string_val"])
}

@ -44,7 +44,7 @@ func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *te
err = c.assignValuesFromFile(filepath.Join(testDir, "config.json"))
if assert.NoError(t, err) {
assert.Equal(t, int64(1), c.values["int_val"])
assert.Equal(t, float64(2), c.values["float_val"])
assert.InDelta(t, float64(2), c.values["float_val"].(float64), 0.0001)
assert.Equal(t, true, c.values["bool_val"])
assert.Equal(t, "this-is-not-overwritten", c.values["string_val"])
}

@ -89,7 +89,7 @@ func TestTemplateConfigAssignValuesFromDefaultValues(t *testing.T) {
err = c.assignDefaultValues(r)
if assert.NoError(t, err) {
assert.Equal(t, int64(123), c.values["int_val"])
assert.Equal(t, float64(123), c.values["float_val"])
assert.InDelta(t, float64(123), c.values["float_val"].(float64), 0.0001)
assert.Equal(t, true, c.values["bool_val"])
assert.Equal(t, "hello", c.values["string_val"])
}

@ -110,7 +110,7 @@ func TestTemplateConfigAssignValuesFromTemplatedDefaultValues(t *testing.T) {
err = c.assignDefaultValues(r)
if assert.NoError(t, err) {
assert.Equal(t, int64(123), c.values["int_val"])
assert.Equal(t, float64(123), c.values["float_val"])
assert.InDelta(t, float64(123), c.values["float_val"].(float64), 0.0001)
assert.Equal(t, true, c.values["bool_val"])
assert.Equal(t, "world", c.values["string_val"])
}
@ -86,7 +86,7 @@ func TestTemplateRandIntFunction(t *testing.T) {
assert.Len(t, r.files, 1)
randInt, err := strconv.Atoi(strings.TrimSpace(string(r.files[0].(*inMemoryFile).content)))
assert.Less(t, randInt, 10)
assert.Empty(t, err)
assert.NoError(t, err)
}

func TestTemplateUuidFunction(t *testing.T) {
@ -434,7 +434,7 @@ func TestRendererSkipAllFilesInCurrentDirectory(t *testing.T) {
entries, err := os.ReadDir(tmpDir)
require.NoError(t, err)
// Assert none of the files are persisted to disk, because of {{skip "*"}}
assert.Len(t, entries, 0)
assert.Empty(t, entries)
}

func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) {

@ -588,8 +588,8 @@ func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) {
assert.NoError(t, err)

assert.Len(t, r.files, 1)
assert.Equal(t, r.files[0].(*copyFile).srcPath, "not-a-template")
assert.Equal(t, r.files[0].RelPath(), "not-a-template")
assert.Equal(t, "not-a-template", r.files[0].(*copyFile).srcPath)
assert.Equal(t, "not-a-template", r.files[0].RelPath())
}

func TestRendererFileTreeRendering(t *testing.T) {

@ -609,7 +609,7 @@ func TestRendererFileTreeRendering(t *testing.T) {

// Assert in memory representation is created.
assert.Len(t, r.files, 1)
assert.Equal(t, r.files[0].RelPath(), "my_directory/my_file")
assert.Equal(t, "my_directory/my_file", r.files[0].RelPath())

out, err := filer.NewLocalClient(tmpDir)
require.NoError(t, err)
@ -2,7 +2,6 @@ package vfs

import (
"context"
"errors"
"io/fs"
"os"
"path/filepath"

@ -42,7 +41,7 @@ func TestFilerPath(t *testing.T) {

// Open non-existent file.
_, err = p.Open("doesntexist_test.go")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// Stat self.
s, err = p.Stat("filer_test.go")

@ -52,7 +51,7 @@ func TestFilerPath(t *testing.T) {

// Stat non-existent file.
_, err = p.Stat("doesntexist_test.go")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// ReadDir self.
entries, err := p.ReadDir(".")

@ -61,7 +60,7 @@ func TestFilerPath(t *testing.T) {

// ReadDir non-existent directory.
_, err = p.ReadDir("doesntexist")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// ReadFile self.
buf, err = p.ReadFile("filer_test.go")

@ -70,7 +69,7 @@ func TestFilerPath(t *testing.T) {

// ReadFile non-existent file.
_, err = p.ReadFile("doesntexist_test.go")
assert.True(t, errors.Is(err, fs.ErrNotExist))
assert.ErrorIs(t, err, fs.ErrNotExist)

// Parent self.
pp := p.Parent()