mirror of https://github.com/databricks/cli.git
Compare commits
4 commits: 386c7d3041 ... 835fb05b87

835fb05b87
5b84856b17
13fa43e0f5
23ddee8023
@@ -411,5 +411,5 @@ func new{{.PascalName}}() *cobra.Command {
 {{- define "request-body-obj" -}}
 {{- $method := .Method -}}
 {{- $field := .Field -}}
-	{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (not $field.IsPath)) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
+	{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (and (not $field.IsPath) (not $field.IsQuery))) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
 {{- end -}}
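The added (not $field.IsQuery) clause stops the generated CLI from addressing query parameters through .{{$method.RequestBodyField.PascalName}}: previously any non-path field on a method with a request body was rendered via the body wrapper. A minimal sketch of the same nested "and" logic in Go's text/template, using toy names (Field, HasBody) rather than the actual codegen model:

package main

import (
	"os"
	"text/template"
)

// Field carries just the two flags the template inspects.
type Field struct{ IsPath, IsQuery bool }

// sel mirrors the template's condition: address the field through the
// request body only when it is neither a path nor a query parameter.
const sel = `{{ if (and .HasBody (and (not .F.IsPath) (not .F.IsQuery))) }}body-wrapped{{ else }}top-level{{ end }}
`

func main() {
	t := template.Must(template.New("sel").Parse(sel))
	for _, f := range []Field{{}, {IsPath: true}, {IsQuery: true}} {
		_ = t.Execute(os.Stdout, map[string]any{"HasBody": true, "F": f})
	}
}

This prints body-wrapped once, then top-level twice; under the old single-exclusion condition the query-parameter case would also have printed body-wrapped, producing a reference to a field that does not live on the body struct.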
Makefile (13 changed lines)
@@ -30,11 +30,16 @@ vendor:
 	@echo "✓ Filling vendor folder with library code ..."
 	@go mod vendor
 
-integration:
-	gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
-
 schema:
 	@echo "✓ Generating json-schema ..."
 	@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
 
-.PHONY: lint lintcheck test testonly coverage build snapshot vendor integration schema
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+
+integration:
+	$(INTEGRATION)
+
+integration-short:
+	$(INTEGRATION) -short
+
+.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
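Factoring the gotestsum command into the INTEGRATION variable keeps the two targets in sync: make integration runs exactly as before, while the new make integration-short appends -short, which lands after the -- separator and is therefore forwarded to the test binaries, making testing.Short() return true.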
@@ -12,12 +12,12 @@ import (
 )
 
 func TestDeployBundleWithCluster(t *testing.T) {
-	ctx, wt := acc.WorkspaceTest(t)
-
-	if testutil.IsAWSCloud(wt) {
+	if testutil.GetCloud(t) == testutil.AWS {
 		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
 	}
 
+	ctx, wt := acc.WorkspaceTest(t)
+
 	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
 	uniqueId := uuid.New().String()
 	root := initTestTemplate(t, ctx, "clusters", map[string]any{
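Two changes land in this hunk: the AWS skip is now evaluated before acc.WorkspaceTest(t), so on AWS the test bails out without provisioning a workspace client, and the testutil.IsAWSCloud(wt) call becomes a direct comparison with testutil.AWS (the helper itself is deleted in the last hunk of this compare).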
@@ -44,6 +44,11 @@ func TestDeployBundleWithCluster(t *testing.T) {
 	require.NoError(t, err)
 	require.NotNil(t, cluster)
 
+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, root, "foo")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello World!")
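This guard is what make integration-short switches on: go test's -short flag makes testing.Short() return true, so the deploy and the cluster assertions still run while the slower job run is skipped. The same block is added to the Python wheel and Spark JAR tests below. A self-contained sketch of the pattern, using a hypothetical test rather than one from this repo:

package example_test

import "testing"

// TestDeployOnly runs the deploy path in every mode but triggers the
// job only when -short is absent; `go test -short` covers just deploy.
func TestDeployOnly(t *testing.T) {
	// ... deploy the bundle and assert the resources exist ...
	if testing.Short() {
		t.Log("Skip the job run in short mode")
		return
	}
	// ... run the job and assert on its output ...
}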
@@ -29,6 +29,11 @@ func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonW
 		destroyBundle(t, ctx, bundleRoot)
 	})
 
+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, bundleRoot, "some_other_job")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello from my func")
@@ -51,9 +56,7 @@ func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
 }
 
 func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
-	_, wt := acc.WorkspaceTest(t)
-
-	if testutil.IsAWSCloud(wt) {
+	if testutil.GetCloud(t) == testutil.AWS {
 		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
 	}
 
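Here the workspace handle existed only to feed the cloud check (the context from acc.WorkspaceTest was already discarded), so once the check reads testutil.GetCloud(t) the WorkspaceTest call is dropped from the preamble entirely.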
@@ -30,6 +30,11 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, arti
 		destroyBundle(t, ctx, bundleRoot)
 	})
 
+	if testing.Short() {
+		t.Log("Skip the job run in short mode")
+		return
+	}
+
 	out, err := runResource(t, ctx, bundleRoot, "jar_job")
 	require.NoError(t, err)
 	require.Contains(t, out, "Hello from Jar!")
@@ -58,7 +58,3 @@ func GetCloud(t TestingT) Cloud {
 	}
 	return -1
 }
-
-func IsAWSCloud(t TestingT) bool {
-	return GetCloud(t) == AWS
-}
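With every caller converted to the direct GetCloud(t) == testutil.AWS comparison, the IsAWSCloud wrapper has no remaining users and is removed.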