Compare commits

...

4 Commits

Author SHA1 Message Date
Pieter Noordhuis 835fb05b87
Merge branch 'main' into feat/custom-annotations-json-schema 2024-12-18 10:59:36 +01:00
Andrew Nester 5b84856b17
Correctly handle required query params in CLI generation (#2027)
## Changes
If there are required query params, they are top-level fields of the request
object, not fields of the nested request body.

This is needed for upcoming OpenAPI spec changes in which such query
parameters are introduced.

No changes after regenerating the CLI with the current spec and the fix (it
appears we haven't had such params before).
2024-12-17 20:05:42 +01:00
Pieter Noordhuis 13fa43e0f5
Remove superfluous helper (#2028)
## Changes

There was only one such helper, for AWS, and none for the other clouds. Found
this while looking into duplicate calls to `acc.WorkspaceTest()` (see
`TestPythonWheelTaskDeployAndRunOnInteractiveCluster`).

## Tests

n/a
2024-12-17 17:34:09 +00:00
Pieter Noordhuis 23ddee8023
Skip job runs during integration testing for PRs (#2024)
## Changes

A small subset of tests triggers cluster creation to run jobs. These
tests account for a substantial share of the total integration test
runtime. We can skip them on PRs and run them only on the main branch.

## Tests

Confirmed the short runtime is ~20 mins.
2024-12-17 17:16:58 +00:00
6 changed files with 30 additions and 16 deletions

View File

@@ -411,5 +411,5 @@ func new{{.PascalName}}() *cobra.Command {
{{- define "request-body-obj" -}}
{{- $method := .Method -}}
{{- $field := .Field -}}
{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (not $field.IsPath)) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
{{$method.CamelName}}Req{{ if (and $method.RequestBodyField (and (not $field.IsPath) (not $field.IsQuery))) }}.{{$method.RequestBodyField.PascalName}}{{end}}.{{$field.PascalName}}
{{- end -}}
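For context, a minimal sketch (hypothetical names, not taken from the generated CLI) of what the added `(not $field.IsQuery)` guard changes: a required query parameter is read from the top-level request struct instead of being addressed through the nested request body.

```go
package main

import "fmt"

// Hypothetical request shapes, for illustration only: a nested request body
// (the method's RequestBodyField) plus a required query parameter that is a
// top-level field of the request object.
type createWidgetBody struct {
	Name string
}

type createWidgetRequest struct {
	PageSize int              // required query parameter ($field.IsQuery)
	Widget   createWidgetBody // nested request body
}

func main() {
	createWidgetReq := createWidgetRequest{PageSize: 10, Widget: createWidgetBody{Name: "demo"}}

	// Before the fix, the template expanded the query field through the body,
	// i.e. createWidgetReq.Widget.PageSize, which is not where the field lives.
	// With the (not $field.IsQuery) guard it expands to the top-level field:
	fmt.Println(createWidgetReq.PageSize)
	fmt.Println(createWidgetReq.Widget.Name)
}
```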

View File

@@ -30,11 +30,16 @@ vendor:
@echo "✓ Filling vendor folder with library code ..."
@go mod vendor
integration:
gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
schema:
@echo "✓ Generating json-schema ..."
@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
.PHONY: lint lintcheck test testonly coverage build snapshot vendor integration schema
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
integration:
$(INTEGRATION)
integration-short:
$(INTEGRATION) -short
.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
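
The `-short` flag added by `integration-short` is passed through gotestsum to `go test`, so individual tests can opt out of the expensive job run via `testing.Short()`. A minimal sketch of that pattern, using only the standard library:

```go
package example_test

import "testing"

// Sketch of the skip pattern used in the test diffs below: when the suite is
// run via `make integration-short` (i.e. with -short), deployment and the
// surrounding assertions still run, but the cluster-backed job run is skipped.
func TestDeployAndRun(t *testing.T) {
	// ... deploy the bundle and assert on the deployed resources ...

	if testing.Short() {
		t.Log("Skip the job run in short mode")
		return
	}

	// ... trigger the job run and assert on its output (full runs only) ...
}
```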

View File

@@ -12,12 +12,12 @@ import (
)
func TestDeployBundleWithCluster(t *testing.T) {
ctx, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {
if testutil.GetCloud(t) == testutil.AWS {
t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
}
ctx, wt := acc.WorkspaceTest(t)
nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root := initTestTemplate(t, ctx, "clusters", map[string]any{
@@ -44,6 +44,11 @@ func TestDeployBundleWithCluster(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, cluster)
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, root, "foo")
require.NoError(t, err)
require.Contains(t, out, "Hello World!")

View File

@@ -29,6 +29,11 @@ func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonW
destroyBundle(t, ctx, bundleRoot)
})
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, bundleRoot, "some_other_job")
require.NoError(t, err)
require.Contains(t, out, "Hello from my func")
@@ -51,9 +56,7 @@ func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
}
func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
_, wt := acc.WorkspaceTest(t)
if testutil.IsAWSCloud(wt) {
if testutil.GetCloud(t) == testutil.AWS {
t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
}

View File

@@ -30,6 +30,11 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, arti
destroyBundle(t, ctx, bundleRoot)
})
if testing.Short() {
t.Log("Skip the job run in short mode")
return
}
out, err := runResource(t, ctx, bundleRoot, "jar_job")
require.NoError(t, err)
require.Contains(t, out, "Hello from Jar!")

View File

@@ -58,7 +58,3 @@ func GetCloud(t TestingT) Cloud {
}
return -1
}
func IsAWSCloud(t TestingT) bool {
return GetCloud(t) == AWS
}
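
With the helper removed, call sites compare the detected cloud against the exported constant directly, which reads the same for every cloud and avoids one-off `IsXxxCloud` wrappers. A short sketch of the call-site pattern; the import path is an assumption, and only `GetCloud` and `AWS` appear in the diffs above:

```go
package example_test

import (
	"testing"

	"github.com/databricks/cli/internal/testutil" // assumed import path
)

// Sketch only: skip cluster-creating tests on AWS by comparing against the
// Cloud constant, instead of going through a dedicated IsAWSCloud helper.
func TestNeedsInteractiveCluster(t *testing.T) {
	if testutil.GetCloud(t) == testutil.AWS {
		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
	}

	// ... create the interactive cluster and run the job (non-AWS clouds) ...
}
```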