Merge branch 'main' into feat/custom-annotations-json-schema

Commit 8a33fb3e95 by Pieter Noordhuis, 2024-12-17 11:47:24 +01:00, committed by GitHub
403 changed files with 4347 additions and 2468 deletions


@ -1 +1 @@
f2385add116e3716c8a90a0b68e204deb40f996c
7016dcbf2e011459416cf408ce21143bcc4b3a25

.git-blame-ignore-revs Normal file

@ -0,0 +1,8 @@
# Enable gofumpt and goimports in golangci-lint (#1999)
2e018cfaec200a02ee2bd5b389e7da3c6f15f460
# Enable errcheck everywhere and fix or silence remaining issues (#1987)
8d5351c1c3d7befda4baae5d6adb99367aa50b3c
# Add error checking in tests and enable errcheck there (#1980)
1b2be1b2cb4b7909df2a8ad4cb6a0f43e8fcf0c6

.gitattributes vendored

@ -37,6 +37,9 @@ cmd/workspace/apps/apps.go linguist-generated=true
cmd/workspace/artifact-allowlists/artifact-allowlists.go linguist-generated=true
cmd/workspace/automatic-cluster-update/automatic-cluster-update.go linguist-generated=true
cmd/workspace/catalogs/catalogs.go linguist-generated=true
cmd/workspace/clean-room-assets/clean-room-assets.go linguist-generated=true
cmd/workspace/clean-room-task-runs/clean-room-task-runs.go linguist-generated=true
cmd/workspace/clean-rooms/clean-rooms.go linguist-generated=true
cmd/workspace/cluster-policies/cluster-policies.go linguist-generated=true
cmd/workspace/clusters/clusters.go linguist-generated=true
cmd/workspace/cmd.go linguist-generated=true


@ -4,12 +4,19 @@ linters:
- bodyclose
- errcheck
- gosimple
#- govet
- govet
- ineffassign
- staticcheck
- unused
- gofmt
- gofumpt
- goimports
linters-settings:
govet:
enable-all: true
disable:
- fieldalignment
- shadow
gofmt:
rewrite-rules:
- pattern: 'a[b:len(a)]'
@ -22,5 +29,10 @@ linters-settings:
- (*github.com/spf13/cobra.Command).MarkFlagRequired
- (*github.com/spf13/pflag.FlagSet).MarkDeprecated
- (*github.com/spf13/pflag.FlagSet).MarkHidden
gofumpt:
module-path: github.com/databricks/cli
extra-rules: true
#goimports:
# local-prefixes: github.com/databricks/cli
issues:
exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
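Most of the mechanical churn in the Go files below follows from the linter changes above. A minimal sketch of the rewrites that gofumpt (with extra-rules), the gofmt rewrite rule, and the newly enabled govet push for; illustrative only, not taken from the repo:

package main

import (
	"fmt"
	"os"
)

// gofumpt groups adjacent single-line declarations into one block...
const (
	paused   = "PAUSED"
	unpaused = "UNPAUSED"
)

func main() {
	// ...and rewrites legacy octal literals to the 0o form.
	if err := os.MkdirAll("tmp/example", 0o700); err != nil { // was: 0700
		fmt.Println(err)
	}
	// gofmt rewrite rule from the config above: a[b:len(a)] -> a[b:]
	s := []int{1, 2, 3}
	fmt.Println(s[1:], paused, unpaused)
}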


@ -7,11 +7,14 @@
"go.lintFlags": [
"--fast"
],
"go.useLanguageServer": true,
"gopls": {
"formatting.gofumpt": true
},
"files.trimTrailingWhitespace": true,
"files.insertFinalNewline": true,
"files.trimFinalNewlines": true,
"python.envFile": "${workspaceRoot}/.env",
"databricks.python.envFile": "${workspaceFolder}/.env",
"python.analysis.stubPath": ".vscode",
"jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\<codecell\\>|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])",
"jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------"


@ -7,13 +7,13 @@ fmt:
@gofmt -w $(shell find . -type f -name '*.go' -not -path "./vendor/*")
lint: vendor
@echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..."
@golangci-lint run --fix ./...
lintcheck: vendor
@echo "✓ Linting source code with https://golangci-lint.run/ ..."
@golangci-lint run ./...
lintfix: vendor
@echo "✓ Linting source code with 'golangci-lint run --fix' ..."
@golangci-lint run --fix ./...
test: lint testonly
testonly:
@ -37,10 +37,10 @@ vendor:
@go mod vendor
integration:
gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./internal/..." -- -run "TestAcc.*" -parallel 4 -timeout=2h
gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
schema:
@echo "✓ Generating json-schema ..."
@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
.PHONY: fmt lint lintfix test testonly coverage build snapshot vendor integration schema
.PHONY: fmt lint lintcheck test testonly coverage build snapshot vendor integration schema


@ -3,7 +3,6 @@ package artifacts
import (
"context"
"fmt"
"slices"
"github.com/databricks/cli/bundle"


@ -13,8 +13,7 @@ func DetectPackages() bundle.Mutator {
return &autodetect{}
}
type autodetect struct {
}
type autodetect struct{}
func (m *autodetect) Name() string {
return "artifacts.DetectPackages"


@ -96,7 +96,6 @@ func (m *expandGlobs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost
// Set the expanded globs back into the configuration.
return dyn.SetByPath(v, base, dyn.V(output))
})
if err != nil {
return diag.FromErr(err)
}


@ -15,8 +15,7 @@ import (
"github.com/databricks/cli/libs/log"
)
type detectPkg struct {
}
type detectPkg struct{}
func DetectPackage() bundle.Mutator {
return &detectPkg{}
@ -42,7 +41,7 @@ func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
return nil
}
log.Infof(ctx, fmt.Sprintf("Found Python wheel project at %s", b.BundleRootPath))
log.Infof(ctx, "Found Python wheel project at %s", b.BundleRootPath)
module := extractModuleName(setupPy)
if b.Config.Artifacts == nil {


@ -186,7 +186,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error)
// Make directory if it doesn't exist yet.
dir := filepath.Join(parts...)
err := os.MkdirAll(dir, 0700)
err := os.MkdirAll(dir, 0o700)
if err != nil {
return "", err
}
@ -203,7 +203,7 @@ func (b *Bundle) InternalDir(ctx context.Context) (string, error) {
}
dir := filepath.Join(cacheDir, internalFolder)
err = os.MkdirAll(dir, 0700)
err = os.MkdirAll(dir, 0o700)
if err != nil {
return dir, err
}


@ -47,8 +47,10 @@ type PyDABs struct {
Import []string `json:"import,omitempty"`
}
type Command string
type ScriptHook string
type (
Command string
ScriptHook string
)
// These hook names are subject to change and currently experimental
const (


@ -6,8 +6,10 @@ import (
"github.com/databricks/databricks-sdk-go/service/jobs"
)
var jobOrder = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"})
var taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"})
var (
jobOrder = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"})
taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"})
)
func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) {
value := make(map[string]dyn.Value)


@ -27,7 +27,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
var out []bundle.Mutator
// Map with files we've already seen to avoid loading them twice.
var seen = map[string]bool{}
seen := map[string]bool{}
for _, file := range config.FileNames {
seen[file] = true


@ -481,5 +481,4 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment)
})
}
}


@ -42,7 +42,6 @@ func rewriteComputeIdToClusterId(v dyn.Value, p dyn.Path) (dyn.Value, diag.Diagn
var diags diag.Diagnostics
computeIdPath := p.Append(dyn.Key("compute_id"))
computeId, err := dyn.GetByPath(v, computeIdPath)
// If the "compute_id" key is not set, we don't need to do anything.
if err != nil {
return v, nil


@ -17,7 +17,7 @@ import (
)
func touchEmptyFile(t *testing.T, path string) {
err := os.MkdirAll(filepath.Dir(path), 0700)
err := os.MkdirAll(filepath.Dir(path), 0o700)
require.NoError(t, err)
f, err := os.Create(path)
require.NoError(t, err)


@ -10,8 +10,7 @@ import (
"github.com/databricks/cli/libs/diag"
)
type initializeURLs struct {
}
type initializeURLs struct{}
// InitializeURLs makes sure the URL field of each resource is configured.
// NOTE: since this depends on an extra API call, this mutator adds some extra
@ -39,7 +38,7 @@ func (m *initializeURLs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn
return nil
}
func initializeForWorkspace(b *bundle.Bundle, orgId string, host string) error {
func initializeForWorkspace(b *bundle.Bundle, orgId, host string) error {
baseURL, err := url.Parse(host)
if err != nil {
return err


@ -23,7 +23,7 @@ func (m *overrideCompute) Name() string {
func overrideJobCompute(j *resources.Job, compute string) {
for i := range j.Tasks {
var task = &j.Tasks[i]
task := &j.Tasks[i]
if task.ForEachTask != nil {
task = &task.ForEachTask.Task
@ -45,8 +45,9 @@ func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) diag.Diag
if b.Config.Bundle.ClusterId != "" {
// Overriding compute via a command-line flag for production works, but is not recommended.
diags = diags.Extend(diag.Diagnostics{{
Summary: "Setting a cluster override for a target that uses 'mode: production' is not recommended",
Detail: "It is recommended to always use the same compute for production target for consistency.",
Summary: "Setting a cluster override for a target that uses 'mode: production' is not recommended",
Detail: "It is recommended to always use the same compute for production target for consistency.",
Severity: diag.Warning,
}})
}
}


@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/stretchr/testify/assert"
@ -173,6 +174,7 @@ func TestOverrideComputeModeProduction(t *testing.T) {
diags := bundle.Apply(context.Background(), b, m)
require.Len(t, diags, 1)
assert.Equal(t, "Setting a cluster override for a target that uses 'mode: production' is not recommended", diags[0].Summary)
assert.Equal(t, diag.Warning, diags[0].Severity)
assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
}


@ -95,7 +95,7 @@ func jobRewritePatterns() []jobRewritePattern {
// VisitJobPaths visits all paths in job resources and applies a function to each path.
func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) {
var err error
var newValue = value
newValue := value
for _, rewritePattern := range jobRewritePatterns() {
newValue, err = dyn.MapByPattern(newValue, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
@ -105,7 +105,6 @@ func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) {
return fn(p, rewritePattern.kind, v)
})
if err != nil {
return dyn.InvalidValue, err
}


@ -57,14 +57,12 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
return dyn.NewValue(fmt.Sprintf("/Workspace%s", path), v.Locations()), nil
})
if err != nil {
return dyn.InvalidValue, err
}
}
return v, nil
})
if err != nil {
return diag.FromErr(err)
}


@ -30,7 +30,6 @@ type parsePythonDiagnosticsTest struct {
}
func TestParsePythonDiagnostics(t *testing.T) {
testCases := []parsePythonDiagnosticsTest{
{
name: "short error with location",


@ -9,12 +9,11 @@ import (
"io"
"os"
"path/filepath"
"strings"
"github.com/databricks/databricks-sdk-go/logger"
"github.com/fatih/color"
"strings"
"github.com/databricks/cli/libs/python"
"github.com/databricks/cli/bundle/env"
@ -94,11 +93,10 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
// mutateDiags is used because Mutate returns 'error' instead of 'diag.Diagnostics'
var mutateDiags diag.Diagnostics
var mutateDiagsHasError = errors.New("unexpected error")
mutateDiagsHasError := errors.New("unexpected error")
err := b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) {
pythonPath, err := detectExecutable(ctx, experimental.PyDABs.VEnvPath)
if err != nil {
return dyn.InvalidValue, fmt.Errorf("failed to get Python interpreter path: %w", err)
}
@ -141,7 +139,7 @@ func createCacheDir(ctx context.Context) (string, error) {
// use 'default' as target name
cacheDir := filepath.Join(tempDir, "default", "pydabs")
err := os.MkdirAll(cacheDir, 0700)
err := os.MkdirAll(cacheDir, 0o700)
if err != nil {
return "", err
}
@ -152,7 +150,7 @@ func createCacheDir(ctx context.Context) (string, error) {
return os.MkdirTemp("", "-pydabs")
}
func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, rootPath string, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
inputPath := filepath.Join(cacheDir, "input.json")
outputPath := filepath.Join(cacheDir, "output.json")
diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
@ -263,10 +261,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
return fmt.Errorf("failed to marshal input: %w", err)
}
return os.WriteFile(inputPath, rootConfigJson, 0600)
return os.WriteFile(inputPath, rootConfigJson, 0o600)
}
func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
outputFile, err := os.Open(outputPath)
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
@ -381,7 +379,7 @@ func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
return right, nil
},
VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
},
}
@ -430,7 +428,7 @@ func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
return right, nil
},
VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) {
if !valuePath.HasPrefix(jobsPath) {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
}


@ -106,7 +106,6 @@ func TestPythonMutator_load(t *testing.T) {
Column: 5,
},
}, diags[0].Locations)
}
func TestPythonMutator_load_disallowed(t *testing.T) {
@ -588,7 +587,7 @@ or activate the environment before running CLI commands:
assert.Equal(t, expected, out)
}
func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
func withProcessStub(t *testing.T, args []string, output, diagnostics string) context.Context {
ctx := context.Background()
ctx, stub := process.WithStub(ctx)
@ -611,10 +610,10 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str
assert.NoError(t, err)
if reflect.DeepEqual(actual.Args, args) {
err := os.WriteFile(outputPath, []byte(output), 0600)
err := os.WriteFile(outputPath, []byte(output), 0o600)
require.NoError(t, err)
err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0600)
err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0o600)
require.NoError(t, err)
return nil
@ -626,7 +625,7 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str
return ctx
}
func loadYaml(name string, content string) *bundle.Bundle {
func loadYaml(name, content string) *bundle.Bundle {
v, diag := config.LoadFromBytes(name, []byte(content))
if diag.Error() != nil {
@ -650,17 +649,17 @@ func withFakeVEnv(t *testing.T, venvPath string) {
interpreterPath := interpreterPath(venvPath)
err = os.MkdirAll(filepath.Dir(interpreterPath), 0755)
err = os.MkdirAll(filepath.Dir(interpreterPath), 0o755)
if err != nil {
panic(err)
}
err = os.WriteFile(interpreterPath, []byte(""), 0755)
err = os.WriteFile(interpreterPath, []byte(""), 0o755)
if err != nil {
panic(err)
}
err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0755)
err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0o755)
if err != nil {
panic(err)
}


@ -32,11 +32,12 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
}
func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
return &resolveVariableReferences{prefixes: []string{
"bundle",
"workspace",
"variables",
},
return &resolveVariableReferences{
prefixes: []string{
"bundle",
"workspace",
"variables",
},
pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")),
lookupFn: lookupForComplexVariables,
skipFn: skipResolvingInNonComplexVariables,
@ -173,7 +174,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
return dyn.InvalidValue, dynvar.ErrSkipResolution
})
})
if err != nil {
return dyn.InvalidValue, err
}
@ -184,7 +184,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
diags = diags.Extend(normaliseDiags)
return root, nil
})
if err != nil {
diags = diags.Extend(diag.FromErr(err))
}


@ -63,7 +63,6 @@ func (m *rewriteWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
return v, nil
})
})
if err != nil {
return diag.FromErr(err)
}


@ -81,5 +81,4 @@ func TestNoWorkspacePrefixUsed(t *testing.T) {
require.Equal(t, "${workspace.artifact_path}/jar1.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[1].Libraries[0].Jar)
require.Equal(t, "${workspace.file_path}/notebook2", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].NotebookTask.NotebookPath)
require.Equal(t, "${workspace.artifact_path}/jar2.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].Libraries[0].Jar)
}


@ -12,8 +12,7 @@ import (
"github.com/databricks/databricks-sdk-go/service/jobs"
)
type setRunAs struct {
}
type setRunAs struct{}
// This mutator does two things:
//
@ -30,7 +29,7 @@ func (m *setRunAs) Name() string {
return "SetRunAs"
}
func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser string, runAsUser string) diag.Diagnostics {
func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser, runAsUser string) diag.Diagnostics {
return diag.Diagnostics{{
Summary: fmt.Sprintf("%s do not support setting a run_as user that is different from the owner.\n"+
"Current identity: %s. Run as identity: %s.\n"+


@ -65,7 +65,6 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
// We should have had a value to set for the variable at this point.
return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
}
func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {


@ -35,7 +35,7 @@ func (m *syncInferRoot) Name() string {
// If the path does not exist, it returns an empty string.
//
// See "sync_infer_root_internal_test.go" for examples.
func (m *syncInferRoot) computeRoot(path string, root string) string {
func (m *syncInferRoot) computeRoot(path, root string) string {
for !filepath.IsLocal(path) {
// Break if we have reached the root of the filesystem.
dir := filepath.Dir(root)


@ -275,8 +275,8 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
}
func gatherFallbackPaths(v dyn.Value, typ string) (map[string]string, error) {
var fallback = make(map[string]string)
var pattern = dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey())
fallback := make(map[string]string)
pattern := dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey())
// Previous behavior was to use a resource's location as the base path to resolve
// relative paths in its definition. With the introduction of [dyn.Value] throughout,


@ -34,7 +34,7 @@ func touchNotebookFile(t *testing.T, path string) {
}
func touchEmptyFile(t *testing.T, path string) {
err := os.MkdirAll(filepath.Dir(path), 0700)
err := os.MkdirAll(filepath.Dir(path), 0o700)
require.NoError(t, err)
f, err := os.Create(path)
require.NoError(t, err)


@ -15,8 +15,7 @@ func VerifyCliVersion() bundle.Mutator {
return &verifyCliVersion{}
}
type verifyCliVersion struct {
}
type verifyCliVersion struct{}
func (v *verifyCliVersion) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// No constraints specified, skip the check.


@ -1,7 +1,9 @@
package config
const Paused = "PAUSED"
const Unpaused = "UNPAUSED"
const (
Paused = "PAUSED"
Unpaused = "UNPAUSED"
)
type Presets struct {
// NamePrefix to prepend to all resource names.


@ -168,7 +168,6 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) {
"key1": "value1",
}, root.Variables["complex"].Default)
assert.Equal(t, "complex var", root.Variables["complex"].Description)
}
func TestIsFullVariableOverrideDef(t *testing.T) {
@ -252,5 +251,4 @@ func TestIsFullVariableOverrideDef(t *testing.T) {
for i, tc := range testCases {
assert.Equal(t, tc.expected, isFullVariableOverrideDef(tc.value), "test case %d", i)
}
}


@ -13,8 +13,7 @@ func FilesToSync() bundle.ReadOnlyMutator {
return &filesToSync{}
}
type filesToSync struct {
}
type filesToSync struct{}
func (v *filesToSync) Name() string {
return "validate:files_to_sync"


@ -2,6 +2,7 @@ package validate
import (
"context"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
@ -81,7 +82,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) {
b := setupBundleForFilesToSyncTest(t)
// Ignore all files.
testutil.WriteFile(t, "*\n.*\n", b.BundleRootPath, ".gitignore")
testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n")
ctx := context.Background()
rb := bundle.ReadOnly(b)


@ -15,8 +15,7 @@ import (
"golang.org/x/sync/errgroup"
)
type folderPermissions struct {
}
type folderPermissions struct{}
// Apply implements bundle.ReadOnlyMutator.
func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) diag.Diagnostics {


@ -13,8 +13,7 @@ func JobClusterKeyDefined() bundle.ReadOnlyMutator {
return &jobClusterKeyDefined{}
}
type jobClusterKeyDefined struct {
}
type jobClusterKeyDefined struct{}
func (v *jobClusterKeyDefined) Name() string {
return "validate:job_cluster_key_defined"


@ -17,8 +17,7 @@ func JobTaskClusterSpec() bundle.ReadOnlyMutator {
return &jobTaskClusterSpec{}
}
type jobTaskClusterSpec struct {
}
type jobTaskClusterSpec struct{}
func (v *jobTaskClusterSpec) Name() string {
return "validate:job_task_cluster_spec"


@ -175,7 +175,6 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) {
Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.foo.job_clusters[0].new_cluster")},
},
}, diags)
})
}
}


@ -8,8 +8,7 @@ import (
"github.com/databricks/cli/libs/dyn"
)
type validate struct {
}
type validate struct{}
type location struct {
path string


@ -17,8 +17,7 @@ func ValidateSyncPatterns() bundle.ReadOnlyMutator {
return &validateSyncPatterns{}
}
type validateSyncPatterns struct {
}
type validateSyncPatterns struct{}
func (v *validateSyncPatterns) Name() string {
return "validate:validate_sync_patterns"


@ -42,7 +42,6 @@ func TestLookup_Empty(t *testing.T) {
// No string representation for an invalid lookup
assert.Empty(t, lookup.String())
}
func TestLookup_Multiple(t *testing.T) {


@ -20,7 +20,6 @@ func (l resolveCluster) Resolve(ctx context.Context, w *databricks.WorkspaceClie
ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
},
})
if err != nil {
return "", err
}


@ -15,7 +15,7 @@ func (d *DeferredMutator) Name() string {
return "deferred"
}
func Defer(mutator Mutator, finally Mutator) Mutator {
func Defer(mutator, finally Mutator) Mutator {
return &DeferredMutator{
mutator: mutator,
finally: finally,


@ -19,7 +19,7 @@ func (t *mutatorWithError) Name() string {
func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics {
t.applyCalled++
return diag.Errorf(t.errorMsg)
return diag.Errorf(t.errorMsg) // nolint:govet
}
func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {


@ -15,8 +15,10 @@ import (
"github.com/google/uuid"
)
const DeploymentStateFileName = "deployment.json"
const DeploymentStateVersion = 1
const (
DeploymentStateFileName = "deployment.json"
DeploymentStateVersion = 1
)
type File struct {
LocalPath string `json:"local_path"`
@ -132,7 +134,7 @@ func (f Filelist) ToSlice(root vfs.Path) []fileset.File {
return files
}
func isLocalStateStale(local io.Reader, remote io.Reader) bool {
func isLocalStateStale(local, remote io.Reader) bool {
localState, err := loadState(local)
if err != nil {
return true

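The body of isLocalStateStale is truncated above. A self-contained sketch of the comparison its name implies, assuming the deployment state carries a sequence number and that a newer remote sequence makes the local copy stale (stateSketch and loadStateSketch are stand-ins for this package's own types):

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

// Stand-in for the deployment state; the sequence-number field is an assumption.
type stateSketch struct {
	Seq int64 `json:"seq"`
}

func loadStateSketch(r io.Reader) (*stateSketch, error) {
	var s stateSketch
	if err := json.NewDecoder(r).Decode(&s); err != nil {
		return nil, err
	}
	return &s, nil
}

// Local state is stale if it cannot be read, or if the remote state reads
// cleanly and carries a higher sequence number.
func isLocalStateStaleSketch(local, remote io.Reader) bool {
	l, err := loadStateSketch(local)
	if err != nil {
		return true
	}
	r, err := loadStateSketch(remote)
	if err != nil {
		return false
	}
	return r.Seq > l.Seq
}

func main() {
	local := strings.NewReader(`{"seq": 1}`)
	remote := strings.NewReader(`{"seq": 2}`)
	fmt.Println(isLocalStateStaleSketch(local, remote)) // true
}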

@ -44,7 +44,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
return diag.FromErr(err)
}
local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600)
local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0o600)
if err != nil {
return diag.FromErr(err)
}


@ -99,7 +99,7 @@ func testStatePull(t *testing.T, opts statePullOpts) {
snapshotPath, err := sync.SnapshotPath(opts)
require.NoError(t, err)
err = os.WriteFile(snapshotPath, []byte("snapshot"), 0644)
err = os.WriteFile(snapshotPath, []byte("snapshot"), 0o644)
require.NoError(t, err)
}
@ -110,7 +110,7 @@ func testStatePull(t *testing.T, opts statePullOpts) {
data, err := json.Marshal(opts.localState)
require.NoError(t, err)
err = os.WriteFile(statePath, data, 0644)
err = os.WriteFile(statePath, data, 0o644)
require.NoError(t, err)
}


@ -74,7 +74,7 @@ func TestStatePush(t *testing.T) {
data, err := json.Marshal(state)
require.NoError(t, err)
err = os.WriteFile(statePath, data, 0644)
err = os.WriteFile(statePath, data, 0o644)
require.NoError(t, err)
diags := bundle.Apply(ctx, b, s)


@ -17,8 +17,7 @@ import (
"github.com/google/uuid"
)
type stateUpdate struct {
}
type stateUpdate struct{}
func (s *stateUpdate) Name() string {
return "deploy:state-update"
@ -57,7 +56,7 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost
return diag.FromErr(err)
}
// Write the state back to the file.
f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600)
f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0o600)
if err != nil {
log.Infof(ctx, "Unable to open deployment state file: %s", err)
return diag.FromErr(err)


@ -119,7 +119,7 @@ func TestStateUpdateWithExistingState(t *testing.T) {
data, err := json.Marshal(state)
require.NoError(t, err)
err = os.WriteFile(statePath, data, 0644)
err = os.WriteFile(statePath, data, 0o644)
require.NoError(t, err)
diags := bundle.Apply(ctx, b, s)


@ -42,8 +42,7 @@ func collectDashboardsFromState(ctx context.Context, b *bundle.Bundle) ([]dashbo
return dashboards, nil
}
type checkDashboardsModifiedRemotely struct {
}
type checkDashboardsModifiedRemotely struct{}
func (l *checkDashboardsModifiedRemotely) Name() string {
return "CheckDashboardsModifiedRemotely"


@ -139,7 +139,7 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
require.NoError(t, err)
// Write fake state file.
testutil.WriteFile(t, `
testutil.WriteFile(t, filepath.Join(tfDir, TerraformStateFileName), `
{
"version": 4,
"terraform_version": "1.5.5",
@ -187,5 +187,5 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle
}
]
}
`, filepath.Join(tfDir, TerraformStateFileName))
`)
}


@ -23,8 +23,7 @@ func (e ErrResourceIsRunning) Error() string {
return fmt.Sprintf("%s %s is running", e.resourceType, e.resourceId)
}
type checkRunningResources struct {
}
type checkRunningResources struct{}
func (l *checkRunningResources) Name() string {
return "check-running-resources"


@ -43,7 +43,7 @@ func convertToResourceStruct[T any](t *testing.T, resource *T, data any) {
}
func TestBundleToTerraformJob(t *testing.T) {
var src = resources.Job{
src := resources.Job{
JobSettings: &jobs.JobSettings{
Name: "my job",
JobClusters: []jobs.JobCluster{
@ -71,7 +71,7 @@ func TestBundleToTerraformJob(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"my_job": &src,
@ -93,7 +93,7 @@ func TestBundleToTerraformJob(t *testing.T) {
}
func TestBundleToTerraformJobPermissions(t *testing.T) {
var src = resources.Job{
src := resources.Job{
Permissions: []resources.Permission{
{
Level: "CAN_VIEW",
@ -102,7 +102,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"my_job": &src,
@ -121,7 +121,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
}
func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
var src = resources.Job{
src := resources.Job{
JobSettings: &jobs.JobSettings{
Name: "my job",
Tasks: []jobs.Task{
@ -139,7 +139,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"my_job": &src,
@ -158,7 +158,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
}
func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
var src = resources.Job{
src := resources.Job{
JobSettings: &jobs.JobSettings{
Name: "my job",
Tasks: []jobs.Task{
@ -182,7 +182,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"my_job": &src,
@ -201,7 +201,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
}
func TestBundleToTerraformPipeline(t *testing.T) {
var src = resources.Pipeline{
src := resources.Pipeline{
PipelineSpec: &pipelines.PipelineSpec{
Name: "my pipeline",
Libraries: []pipelines.PipelineLibrary{
@ -239,7 +239,7 @@ func TestBundleToTerraformPipeline(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"my_pipeline": &src,
@ -262,7 +262,7 @@ func TestBundleToTerraformPipeline(t *testing.T) {
}
func TestBundleToTerraformPipelinePermissions(t *testing.T) {
var src = resources.Pipeline{
src := resources.Pipeline{
Permissions: []resources.Permission{
{
Level: "CAN_VIEW",
@ -271,7 +271,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Pipelines: map[string]*resources.Pipeline{
"my_pipeline": &src,
@ -290,7 +290,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
}
func TestBundleToTerraformModel(t *testing.T) {
var src = resources.MlflowModel{
src := resources.MlflowModel{
Model: &ml.Model{
Name: "name",
Description: "description",
@ -307,7 +307,7 @@ func TestBundleToTerraformModel(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Models: map[string]*resources.MlflowModel{
"my_model": &src,
@ -330,7 +330,7 @@ func TestBundleToTerraformModel(t *testing.T) {
}
func TestBundleToTerraformModelPermissions(t *testing.T) {
var src = resources.MlflowModel{
src := resources.MlflowModel{
Model: &ml.Model{
Name: "name",
},
@ -342,7 +342,7 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Models: map[string]*resources.MlflowModel{
"my_model": &src,
@ -361,13 +361,13 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
}
func TestBundleToTerraformExperiment(t *testing.T) {
var src = resources.MlflowExperiment{
src := resources.MlflowExperiment{
Experiment: &ml.Experiment{
Name: "name",
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Experiments: map[string]*resources.MlflowExperiment{
"my_experiment": &src,
@ -384,7 +384,7 @@ func TestBundleToTerraformExperiment(t *testing.T) {
}
func TestBundleToTerraformExperimentPermissions(t *testing.T) {
var src = resources.MlflowExperiment{
src := resources.MlflowExperiment{
Experiment: &ml.Experiment{
Name: "name",
},
@ -396,7 +396,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Experiments: map[string]*resources.MlflowExperiment{
"my_experiment": &src,
@ -415,7 +415,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
}
func TestBundleToTerraformModelServing(t *testing.T) {
var src = resources.ModelServingEndpoint{
src := resources.ModelServingEndpoint{
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "name",
Config: serving.EndpointCoreConfigInput{
@ -439,7 +439,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
"my_model_serving_endpoint": &src,
@ -462,7 +462,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
}
func TestBundleToTerraformModelServingPermissions(t *testing.T) {
var src = resources.ModelServingEndpoint{
src := resources.ModelServingEndpoint{
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "name",
@ -492,7 +492,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
"my_model_serving_endpoint": &src,
@ -511,7 +511,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
}
func TestBundleToTerraformRegisteredModel(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "name",
CatalogName: "catalog",
@ -520,7 +520,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
RegisteredModels: map[string]*resources.RegisteredModel{
"my_registered_model": &src,
@ -540,7 +540,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
}
func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "name",
CatalogName: "catalog",
@ -554,7 +554,7 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
RegisteredModels: map[string]*resources.RegisteredModel{
"my_registered_model": &src,
@ -573,14 +573,14 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
}
func TestBundleToTerraformDeletedResources(t *testing.T) {
var job1 = resources.Job{
job1 := resources.Job{
JobSettings: &jobs.JobSettings{},
}
var job2 = resources.Job{
job2 := resources.Job{
ModifiedStatus: resources.ModifiedStatusDeleted,
JobSettings: &jobs.JobSettings{},
}
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"my_job1": &job1,
@ -601,10 +601,10 @@ func TestBundleToTerraformDeletedResources(t *testing.T) {
}
func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
var config = config.Root{
config := config.Root{
Resources: config.Resources{},
}
var tfState = resourcesState{
tfState := resourcesState{
Resources: []stateResource{
{
Type: "databricks_job",
@ -736,7 +736,7 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
}
func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test_job": {
@ -817,7 +817,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
},
},
}
var tfState = resourcesState{
tfState := resourcesState{
Resources: nil,
}
err := TerraformToBundle(&tfState, &config)
@ -860,7 +860,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
}
func TestTerraformToBundleModifiedResources(t *testing.T) {
var config = config.Root{
config := config.Root{
Resources: config.Resources{
Jobs: map[string]*resources.Job{
"test_job": {
@ -996,7 +996,7 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
},
},
}
var tfState = resourcesState{
tfState := resourcesState{
Resources: []stateResource{
{
Type: "databricks_job",


@ -145,7 +145,7 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error {
// This function is used for env vars set by the Databricks VSCode extension. The variables are intended to be used by the CLI
// bundled with the Databricks VSCode extension, but users can use different CLI versions in the VSCode terminals, in which case we want to ignore
// the variables if that CLI uses different versions of the dependencies.
func getEnvVarWithMatchingVersion(ctx context.Context, envVarName string, versionVarName string, currentVersion string) (string, error) {
func getEnvVarWithMatchingVersion(ctx context.Context, envVarName, versionVarName, currentVersion string) (string, error) {
envValue := env.Get(ctx, envVarName)
versionValue := env.Get(ctx, versionVarName)
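The remainder of this function is truncated here. A sketch of the matching rule described by the comment above, under the assumption that a value whose companion version variable is missing or mismatched should be treated as unset:

package main

import "fmt"

// Assumption: the env value only takes effect when its companion version
// variable matches the dependency version of this CLI build.
func envVarWithMatchingVersionSketch(envValue, versionValue, currentVersion string) string {
	if envValue == "" || versionValue != currentVersion {
		return "" // unset, or set by a CLI build with different dependencies
	}
	return envValue
}

func main() {
	fmt.Println(envVarWithMatchingVersionSketch("/opt/terraform", "1.5.5", "1.5.5")) // /opt/terraform
	fmt.Println(envVarWithMatchingVersionSketch("/opt/terraform", "1.4.0", "1.5.5")) // (empty)
}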


@ -400,7 +400,7 @@ func TestFindExecPathFromEnvironmentWithCorrectVersionAndBinary(t *testing.T) {
require.Equal(t, tmpBinPath, b.Config.Bundle.Terraform.ExecPath)
}
func createTempFile(t *testing.T, dest string, name string, executable bool) string {
func createTempFile(t *testing.T, dest, name string, executable bool) string {
binPath := filepath.Join(dest, name)
f, err := os.Create(binPath)
require.NoError(t, err)
@ -409,7 +409,7 @@ func createTempFile(t *testing.T, dest string, name string, executable bool) str
require.NoError(t, err)
}()
if executable {
err = f.Chmod(0777)
err = f.Chmod(0o777)
require.NoError(t, err)
}
return binPath
@ -422,7 +422,7 @@ func TestGetEnvVarWithMatchingVersion(t *testing.T) {
tmp := t.TempDir()
file := testutil.Touch(t, tmp, "bar")
var tc = []struct {
tc := []struct {
envValue string
versionValue string
currentVersion string


@ -10,8 +10,7 @@ import (
"github.com/databricks/cli/libs/dyn/dynvar"
)
type interpolateMutator struct {
}
type interpolateMutator struct{}
func Interpolate() bundle.Mutator {
return &interpolateMutator{}


@ -5,15 +5,19 @@ import (
"github.com/hashicorp/go-version"
)
const TerraformStateFileName = "terraform.tfstate"
const TerraformConfigFileName = "bundle.tf.json"
const (
TerraformStateFileName = "terraform.tfstate"
TerraformConfigFileName = "bundle.tf.json"
)
// Users can provide their own terraform binary and databricks terraform provider by setting the following environment variables.
// This allows users to use the CLI in air-gapped environments. See the `debug terraform` command.
const TerraformExecPathEnv = "DATABRICKS_TF_EXEC_PATH"
const TerraformVersionEnv = "DATABRICKS_TF_VERSION"
const TerraformCliConfigPathEnv = "DATABRICKS_TF_CLI_CONFIG_FILE"
const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
const (
TerraformExecPathEnv = "DATABRICKS_TF_EXEC_PATH"
TerraformVersionEnv = "DATABRICKS_TF_VERSION"
TerraformCliConfigPathEnv = "DATABRICKS_TF_CLI_CONFIG_FILE"
TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
)
// Terraform CLI version to use and the corresponding checksums for it. The
// checksums are used to verify the integrity of the downloaded binary. Please
@ -26,8 +30,10 @@ const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
// downloaded Terraform archive.
var TerraformVersion = version.Must(version.NewVersion("1.5.5"))
const checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2"
const checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a"
const (
checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2"
checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a"
)
type Checksum struct {
LinuxArm64 string `json:"linux_arm64"`

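Per the comment above, these variables let air-gapped installs point the CLI at a pre-provisioned Terraform. A hypothetical illustration; the variable names come from the consts above, while the paths and the in-process Setenv calls are illustrative only:

package main

import (
	"fmt"
	"os"
)

func main() {
	// A real setup would export these in the shell environment instead.
	_ = os.Setenv("DATABRICKS_TF_EXEC_PATH", "/opt/terraform/bin/terraform")
	_ = os.Setenv("DATABRICKS_TF_VERSION", "1.5.5") // matches TerraformVersion above
	fmt.Println(os.Getenv("DATABRICKS_TF_EXEC_PATH"))
}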

@ -14,7 +14,7 @@ import (
"github.com/stretchr/testify/require"
)
func downloadAndChecksum(t *testing.T, url string, expectedChecksum string) {
func downloadAndChecksum(t *testing.T, url, expectedChecksum string) {
resp, err := http.Get(url)
require.NoError(t, err)
defer resp.Body.Close()


@ -2,7 +2,6 @@ package terraform
import (
"context"
"fmt"
"path/filepath"
"github.com/databricks/cli/bundle"
@ -57,7 +56,7 @@ func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
IsEmpty: !notEmpty,
}
log.Debugf(ctx, fmt.Sprintf("Planning complete and persisted at %s\n", planPath))
log.Debugf(ctx, "Planning complete and persisted at %s\n", planPath)
return nil
}


@ -104,7 +104,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
localState, err := l.localState(ctx, b)
if errors.Is(err, fs.ErrNotExist) {
log.Infof(ctx, "Local state file does not exist. Using remote Terraform state.")
err := os.WriteFile(localStatePath, remoteContent, 0600)
err := os.WriteFile(localStatePath, remoteContent, 0o600)
return diag.FromErr(err)
}
if err != nil {
@ -114,14 +114,14 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
// If the lineage does not match, the Terraform state files do not correspond to the same deployment.
if localState.Lineage != remoteState.Lineage {
log.Infof(ctx, "Remote and local state lineages do not match. Using remote Terraform state. Invalidating local Terraform state.")
err := os.WriteFile(localStatePath, remoteContent, 0600)
err := os.WriteFile(localStatePath, remoteContent, 0o600)
return diag.FromErr(err)
}
// If the remote state is newer than the local state, we should use the remote state.
if remoteState.Serial > localState.Serial {
log.Infof(ctx, "Remote state is newer than local state. Using remote Terraform state.")
err := os.WriteFile(localStatePath, remoteContent, 0600)
err := os.WriteFile(localStatePath, remoteContent, 0o600)
return diag.FromErr(err)
}


@ -14,7 +14,7 @@ import (
)
func TestConvertCluster(t *testing.T) {
var src = resources.Cluster{
src := resources.Cluster{
ClusterSpec: &compute.ClusterSpec{
NumWorkers: 3,
SparkVersion: "13.3.x-scala2.12",
@ -93,5 +93,4 @@ func TestConvertCluster(t *testing.T) {
},
},
}, out.Permissions["cluster_my_cluster"])
}


@ -17,7 +17,7 @@ const (
)
// Marshal "serialized_dashboard" as JSON if it is set in the input but not in the output.
func marshalSerializedDashboard(vin dyn.Value, vout dyn.Value) (dyn.Value, error) {
func marshalSerializedDashboard(vin, vout dyn.Value) (dyn.Value, error) {
// Skip if the "serialized_dashboard" field is already set.
if v := vout.Get(serializedDashboardFieldName); v.IsValid() {
return vout, nil


@ -14,7 +14,7 @@ import (
)
func TestConvertDashboard(t *testing.T) {
var src = resources.Dashboard{
src := resources.Dashboard{
Dashboard: &dashboards.Dashboard{
DisplayName: "my dashboard",
WarehouseId: "f00dcafe",
@ -60,7 +60,7 @@ func TestConvertDashboard(t *testing.T) {
}
func TestConvertDashboardFilePath(t *testing.T) {
var src = resources.Dashboard{
src := resources.Dashboard{
FilePath: "some/path",
}
@ -84,7 +84,7 @@ func TestConvertDashboardFilePath(t *testing.T) {
}
func TestConvertDashboardFilePathQuoted(t *testing.T) {
var src = resources.Dashboard{
src := resources.Dashboard{
FilePath: `C:\foo\bar\baz\dashboard.lvdash.json`,
}
@ -108,7 +108,7 @@ func TestConvertDashboardFilePathQuoted(t *testing.T) {
}
func TestConvertDashboardSerializedDashboardString(t *testing.T) {
var src = resources.Dashboard{
src := resources.Dashboard{
SerializedDashboard: `{ "json": true }`,
}
@ -127,7 +127,7 @@ func TestConvertDashboardSerializedDashboardString(t *testing.T) {
}
func TestConvertDashboardSerializedDashboardAny(t *testing.T) {
var src = resources.Dashboard{
src := resources.Dashboard{
SerializedDashboard: map[string]any{
"pages": []map[string]any{
{


@ -14,7 +14,7 @@ import (
)
func TestConvertExperiment(t *testing.T) {
var src = resources.MlflowExperiment{
src := resources.MlflowExperiment{
Experiment: &ml.Experiment{
Name: "name",
},


@ -13,7 +13,7 @@ import (
)
func TestConvertGrants(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
Grants: []resources.Grant{
{
Privileges: []string{"EXECUTE", "FOO"},
@ -45,7 +45,7 @@ func TestConvertGrants(t *testing.T) {
}
func TestConvertGrantsNil(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
Grants: nil,
}
@ -58,7 +58,7 @@ func TestConvertGrantsNil(t *testing.T) {
}
func TestConvertGrantsEmpty(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
Grants: []resources.Grant{},
}


@ -83,7 +83,6 @@ func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
"libraries": "library",
})
})
if err != nil {
return dyn.InvalidValue, err
}


@ -15,7 +15,7 @@ import (
)
func TestConvertJob(t *testing.T) {
var src = resources.Job{
src := resources.Job{
JobSettings: &jobs.JobSettings{
Name: "my job",
JobClusters: []jobs.JobCluster{


@ -14,7 +14,7 @@ import (
)
func TestConvertModelServingEndpoint(t *testing.T) {
var src = resources.ModelServingEndpoint{
src := resources.ModelServingEndpoint{
CreateServingEndpoint: &serving.CreateServingEndpoint{
Name: "name",
Config: serving.EndpointCoreConfigInput{


@ -14,7 +14,7 @@ import (
)
func TestConvertModel(t *testing.T) {
var src = resources.MlflowModel{
src := resources.MlflowModel{
Model: &ml.Model{
Name: "name",
Description: "description",


@ -13,7 +13,7 @@ import (
)
func TestConvertPermissions(t *testing.T) {
var src = resources.Job{
src := resources.Job{
Permissions: []resources.Permission{
{
Level: "CAN_VIEW",
@ -59,7 +59,7 @@ func TestConvertPermissions(t *testing.T) {
}
func TestConvertPermissionsNil(t *testing.T) {
var src = resources.Job{
src := resources.Job{
Permissions: nil,
}
@ -72,7 +72,7 @@ func TestConvertPermissionsNil(t *testing.T) {
}
func TestConvertPermissionsEmpty(t *testing.T) {
var src = resources.Job{
src := resources.Job{
Permissions: []resources.Permission{},
}


@ -14,7 +14,7 @@ import (
)
func TestConvertPipeline(t *testing.T) {
var src = resources.Pipeline{
src := resources.Pipeline{
PipelineSpec: &pipelines.PipelineSpec{
Name: "my pipeline",
Libraries: []pipelines.PipelineLibrary{


@ -14,7 +14,7 @@ import (
)
func TestConvertQualityMonitor(t *testing.T) {
var src = resources.QualityMonitor{
src := resources.QualityMonitor{
TableName: "test_table_name",
CreateMonitor: &catalog.CreateMonitor{
AssetsDir: "assets_dir",


@ -14,7 +14,7 @@ import (
)
func TestConvertRegisteredModel(t *testing.T) {
var src = resources.RegisteredModel{
src := resources.RegisteredModel{
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "name",
CatalogName: "catalog",


@ -14,7 +14,7 @@ import (
)
func TestConvertSchema(t *testing.T) {
var src = resources.Schema{
src := resources.Schema{
CreateSchema: &catalog.CreateSchema{
Name: "name",
CatalogName: "catalog",


@ -14,7 +14,7 @@ import (
)
func TestConvertVolume(t *testing.T) {
var src = resources.Volume{
src := resources.Volume{
CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
CatalogName: "catalog",
Comment: "comment",


@ -11,7 +11,7 @@ import (
// definition uses the plural name. This function can convert between the two.
func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
var err error
var acc = dyn.V(map[string]dyn.Value{})
acc := dyn.V(map[string]dyn.Value{})
nv, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
if len(p) == 0 {
@ -36,7 +36,6 @@ func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
// Pass through all other values.
return v, dyn.ErrSkip
})
if err != nil {
return dyn.InvalidValue, err
}


@ -37,6 +37,6 @@ func (*unbind) Name() string {
return "terraform.Unbind"
}
func Unbind(resourceType string, resourceKey string) bundle.Mutator {
func Unbind(resourceType, resourceKey string) bundle.Mutator {
return &unbind{resourceType: resourceType, resourceKey: resourceKey}
}


@ -50,7 +50,8 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
{
Type: jsonschema.StringType,
Pattern: interpolationPattern("var"),
}},
},
},
}
case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType:
// primitives can have variable values, or references like ${bundle.xyz}
@ -173,7 +174,7 @@ func generateSchema(workdir, outputFile string) {
}
// Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, b, 0644)
err = os.WriteFile(outputFile, b, 0o644)
if err != nil {
log.Fatal(err)
}


@ -2,9 +2,8 @@ package generator
import (
"fmt"
"strings"
"slices"
"strings"
tfjson "github.com/hashicorp/terraform-json"
"github.com/iancoleman/strcase"
@ -70,6 +69,25 @@ func nestedBlockKeys(block *tfjson.SchemaBlock) []string {
return keys
}
func nestedField(name []string, k string, isRef bool) field {
// Collect field properties.
fieldName := strcase.ToCamel(k)
fieldTypePrefix := ""
if isRef {
fieldTypePrefix = "*"
} else {
fieldTypePrefix = "[]"
}
fieldType := fmt.Sprintf("%s%s", fieldTypePrefix, strings.Join(append(name, strcase.ToCamel(k)), ""))
fieldTag := fmt.Sprintf("%s,omitempty", k)
return field{
Name: fieldName,
Type: fieldType,
Tag: fieldTag,
}
}
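// Worked example (cross-checked against the generated schema later in this
// diff): for the single-nested "app" attribute of the App data source,
//
//	nestedField([]string{"DataSourceApp"}, "app", true)
//
// returns field{Name: "App", Type: "*DataSourceAppApp", Tag: "app,omitempty"},
// which renders as: App *DataSourceAppApp `json:"app,omitempty"`.
// With isRef == false (repeated nesting) the type takes the slice form
// instead, e.g. Resources []DataSourceAppAppResources.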
func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error {
// Produce nested types before this block itself.
// This ensures types are defined before they are referenced.
@ -91,10 +109,24 @@ func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error {
v := block.Attributes[k]
// Assert the attribute type is always set.
if v.AttributeType == cty.NilType {
if v.AttributeType == cty.NilType && v.AttributeNestedType == nil {
return fmt.Errorf("unexpected nil type for attribute %s", k)
}
// If there is a nested type, walk it and continue to next attribute.
if v.AttributeNestedType != nil {
nestedBlock := &tfjson.SchemaBlock{
Attributes: v.AttributeNestedType.Attributes,
}
err := w.walk(nestedBlock, append(name, strcase.ToCamel(k)))
if err != nil {
return err
}
// Append to list of fields for type.
typ.Fields = append(typ.Fields, nestedField(name, k, v.AttributeNestedType.NestingMode == tfjson.SchemaNestingModeSingle))
continue
}
// Collect field properties.
fieldName := strcase.ToCamel(k)
fieldType := processAttributeType(v.AttributeType)
@ -117,24 +149,8 @@ func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error {
// Declare nested blocks.
for _, k := range nestedBlockKeys(block) {
v := block.NestedBlocks[k]
// Collect field properties.
fieldName := strcase.ToCamel(k)
fieldTypePrefix := ""
if v.MaxItems == 1 {
fieldTypePrefix = "*"
} else {
fieldTypePrefix = "[]"
}
fieldType := fmt.Sprintf("%s%s", fieldTypePrefix, strings.Join(append(name, strcase.ToCamel(k)), ""))
fieldTag := fmt.Sprintf("%s,omitempty", k)
// Append to list of fields for type.
typ.Fields = append(typ.Fields, field{
Name: fieldName,
Type: fieldType,
Tag: fieldTag,
})
typ.Fields = append(typ.Fields, nestedField(name, k, v.MaxItems == 1))
}
// Append type to list of structs.


@ -20,7 +20,7 @@ require (
github.com/cloudflare/circl v1.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
golang.org/x/crypto v0.30.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/mod v0.22.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect


@ -60,8 +60,8 @@ github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM
github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw=
github.com/zclconf/go-cty v1.15.1 h1:RgQYm4j2EvoBRXOPxhUvxPzRrGDo1eCOhHXuGfrj5S0=
github.com/zclconf/go-cty v1.15.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY=
golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20241204233417-43b7b7cde48d h1:0olWaB5pg3+oychR51GUVCEsGkeCU/2JxjBgIo4f3M0=
golang.org/x/exp v0.0.0-20241204233417-43b7b7cde48d/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c=
golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4=


@ -1,3 +1,3 @@
package schema
const ProviderVersion = "1.59.0"
const ProviderVersion = "1.61.0"


@ -25,9 +25,9 @@ const ProviderVersion = "{{ .ProviderVersion }}"
func NewRoot() *Root {
return &Root{
Terraform: map[string]interface{}{
"required_providers": map[string]interface{}{
"databricks": map[string]interface{}{
Terraform: map[string]any{
"required_providers": map[string]any{
"databricks": map[string]any{
"source": ProviderSource,
"version": ProviderVersion,
},


@ -0,0 +1,107 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAppAppActiveDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type DataSourceAppAppActiveDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppAppActiveDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *DataSourceAppAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *DataSourceAppAppActiveDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type DataSourceAppAppAppStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppAppComputeStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppAppPendingDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type DataSourceAppAppPendingDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppAppPendingDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *DataSourceAppAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *DataSourceAppAppPendingDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type DataSourceAppAppResourcesJob struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type DataSourceAppAppResourcesSecret struct {
Key string `json:"key"`
Permission string `json:"permission"`
Scope string `json:"scope"`
}
type DataSourceAppAppResourcesServingEndpoint struct {
Name string `json:"name"`
Permission string `json:"permission"`
}
type DataSourceAppAppResourcesSqlWarehouse struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type DataSourceAppAppResources struct {
Description string `json:"description,omitempty"`
Job *DataSourceAppAppResourcesJob `json:"job,omitempty"`
Name string `json:"name"`
Secret *DataSourceAppAppResourcesSecret `json:"secret,omitempty"`
ServingEndpoint *DataSourceAppAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"`
SqlWarehouse *DataSourceAppAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"`
}
type DataSourceAppApp struct {
ActiveDeployment *DataSourceAppAppActiveDeployment `json:"active_deployment,omitempty"`
AppStatus *DataSourceAppAppAppStatus `json:"app_status,omitempty"`
ComputeStatus *DataSourceAppAppComputeStatus `json:"compute_status,omitempty"`
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DefaultSourceCodePath string `json:"default_source_code_path,omitempty"`
Description string `json:"description,omitempty"`
Name string `json:"name"`
PendingDeployment *DataSourceAppAppPendingDeployment `json:"pending_deployment,omitempty"`
Resources []DataSourceAppAppResources `json:"resources,omitempty"`
ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"`
ServicePrincipalId int `json:"service_principal_id,omitempty"`
ServicePrincipalName string `json:"service_principal_name,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
Updater string `json:"updater,omitempty"`
Url string `json:"url,omitempty"`
}
type DataSourceApp struct {
App *DataSourceAppApp `json:"app,omitempty"`
Name string `json:"name"`
}
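
For orientation, a minimal sketch of how one of these generated structs serializes (an Example placed in the same schema package; the app name is illustrative, not from the diff): every field tagged omitempty is dropped from the JSON at its zero value, so only the required name survives below.

package schema

import (
	"encoding/json"
	"fmt"
)

func ExampleDataSourceApp() {
	// Hypothetical usage: Name has no omitempty, so a zero-value
	// DataSourceApp with just a name marshals to a single-key object.
	ds := DataSourceApp{Name: "my-app"}
	out, err := json.Marshal(ds)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Output: {"name":"my-app"}
}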

@ -0,0 +1,106 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type DataSourceAppsAppActiveDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type DataSourceAppsAppActiveDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppsAppActiveDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *DataSourceAppsAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *DataSourceAppsAppActiveDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type DataSourceAppsAppAppStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppsAppComputeStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppsAppPendingDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type DataSourceAppsAppPendingDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type DataSourceAppsAppPendingDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *DataSourceAppsAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *DataSourceAppsAppPendingDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type DataSourceAppsAppResourcesJob struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type DataSourceAppsAppResourcesSecret struct {
Key string `json:"key"`
Permission string `json:"permission"`
Scope string `json:"scope"`
}
type DataSourceAppsAppResourcesServingEndpoint struct {
Name string `json:"name"`
Permission string `json:"permission"`
}
type DataSourceAppsAppResourcesSqlWarehouse struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type DataSourceAppsAppResources struct {
Description string `json:"description,omitempty"`
Job *DataSourceAppsAppResourcesJob `json:"job,omitempty"`
Name string `json:"name"`
Secret *DataSourceAppsAppResourcesSecret `json:"secret,omitempty"`
ServingEndpoint *DataSourceAppsAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"`
SqlWarehouse *DataSourceAppsAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"`
}
type DataSourceAppsApp struct {
ActiveDeployment *DataSourceAppsAppActiveDeployment `json:"active_deployment,omitempty"`
AppStatus *DataSourceAppsAppAppStatus `json:"app_status,omitempty"`
ComputeStatus *DataSourceAppsAppComputeStatus `json:"compute_status,omitempty"`
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DefaultSourceCodePath string `json:"default_source_code_path,omitempty"`
Description string `json:"description,omitempty"`
Name string `json:"name"`
PendingDeployment *DataSourceAppsAppPendingDeployment `json:"pending_deployment,omitempty"`
Resources []DataSourceAppsAppResources `json:"resources,omitempty"`
ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"`
ServicePrincipalId int `json:"service_principal_id,omitempty"`
ServicePrincipalName string `json:"service_principal_name,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
Updater string `json:"updater,omitempty"`
Url string `json:"url,omitempty"`
}
type DataSourceApps struct {
App []DataSourceAppsApp `json:"app,omitempty"`
}

@ -69,6 +69,7 @@ type DataSourceFunctionsFunctions struct {
FullDataType string `json:"full_data_type,omitempty"`
FullName string `json:"full_name,omitempty"`
FunctionId string `json:"function_id,omitempty"`
InputParams []DataSourceFunctionsFunctionsInputParams `json:"input_params,omitempty"`
IsDeterministic bool `json:"is_deterministic,omitempty"`
IsNullCall bool `json:"is_null_call,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
@ -76,8 +77,10 @@ type DataSourceFunctionsFunctions struct {
Owner string `json:"owner,omitempty"`
ParameterStyle string `json:"parameter_style,omitempty"`
Properties string `json:"properties,omitempty"`
ReturnParams []DataSourceFunctionsFunctionsReturnParams `json:"return_params,omitempty"`
RoutineBody string `json:"routine_body,omitempty"`
RoutineDefinition string `json:"routine_definition,omitempty"`
RoutineDependencies []DataSourceFunctionsFunctionsRoutineDependencies `json:"routine_dependencies,omitempty"`
SchemaName string `json:"schema_name,omitempty"`
SecurityType string `json:"security_type,omitempty"`
SpecificName string `json:"specific_name,omitempty"`
@ -85,14 +88,11 @@ type DataSourceFunctionsFunctions struct {
SqlPath string `json:"sql_path,omitempty"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
}
type DataSourceFunctions struct {
CatalogName string `json:"catalog_name"`
Functions []DataSourceFunctionsFunctions `json:"functions,omitempty"`
IncludeBrowse bool `json:"include_browse,omitempty"`
SchemaName string `json:"schema_name"`
}

@ -3,6 +3,7 @@
package schema
type DataSourceJobs struct {
Id string `json:"id,omitempty"`
Ids map[string]string `json:"ids,omitempty"`
JobNameContains string `json:"job_name_contains,omitempty"`
}

@ -10,6 +10,6 @@ type DataSourceNotificationDestinationsNotificationDestinations struct {
type DataSourceNotificationDestinations struct {
DisplayNameContains string `json:"display_name_contains,omitempty"`
NotificationDestinations []DataSourceNotificationDestinationsNotificationDestinations `json:"notification_destinations,omitempty"`
Type string `json:"type,omitempty"`
}

@ -8,6 +8,7 @@ type DataSourceRegisteredModelModelInfoAliases struct {
}
type DataSourceRegisteredModelModelInfo struct {
Aliases []DataSourceRegisteredModelModelInfoAliases `json:"aliases,omitempty"`
BrowseOnly bool `json:"browse_only,omitempty"`
CatalogName string `json:"catalog_name,omitempty"`
Comment string `json:"comment,omitempty"`
@ -21,7 +22,6 @@ type DataSourceRegisteredModelModelInfo struct {
StorageLocation string `json:"storage_location,omitempty"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
}
type DataSourceRegisteredModel struct {

@ -25,6 +25,7 @@ type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies stru
}
type DataSourceRegisteredModelVersionsModelVersions struct {
Aliases []DataSourceRegisteredModelVersionsModelVersionsAliases `json:"aliases,omitempty"`
BrowseOnly bool `json:"browse_only,omitempty"`
CatalogName string `json:"catalog_name,omitempty"`
Comment string `json:"comment,omitempty"`
@ -33,6 +34,7 @@ type DataSourceRegisteredModelVersionsModelVersions struct {
Id string `json:"id,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
ModelName string `json:"model_name,omitempty"`
ModelVersionDependencies []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies `json:"model_version_dependencies,omitempty"`
RunId string `json:"run_id,omitempty"`
RunWorkspaceId int `json:"run_workspace_id,omitempty"`
SchemaName string `json:"schema_name,omitempty"`
@ -42,8 +44,6 @@ type DataSourceRegisteredModelVersionsModelVersions struct {
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
Version int `json:"version,omitempty"`
}
type DataSourceRegisteredModelVersions struct {

@ -8,9 +8,9 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii struct {
type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct {
InvalidKeywords []string `json:"invalid_keywords,omitempty"`
Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii `json:"pii,omitempty"`
Safety bool `json:"safety,omitempty"`
ValidTopics []string `json:"valid_topics,omitempty"`
}
type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct {
@ -19,9 +19,9 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct {
type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput struct {
InvalidKeywords []string `json:"invalid_keywords,omitempty"`
Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii `json:"pii,omitempty"`
Safety bool `json:"safety,omitempty"`
ValidTopics []string `json:"valid_topics,omitempty"`
}
type DataSourceServingEndpointsEndpointsAiGatewayGuardrails struct {
@ -111,17 +111,17 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmCon
}
type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel struct {
Ai21LabsConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAi21LabsConfig `json:"ai21labs_config,omitempty"`
AmazonBedrockConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAmazonBedrockConfig `json:"amazon_bedrock_config,omitempty"`
AnthropicConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAnthropicConfig `json:"anthropic_config,omitempty"`
CohereConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelCohereConfig `json:"cohere_config,omitempty"`
DatabricksModelServingConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabricksModelServingConfig `json:"databricks_model_serving_config,omitempty"`
GoogleCloudVertexAiConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig `json:"google_cloud_vertex_ai_config,omitempty"`
Name string `json:"name"`
OpenaiConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig `json:"openai_config,omitempty"`
PalmConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmConfig `json:"palm_config,omitempty"`
Provider string `json:"provider"`
Task string `json:"task"`
}
type DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel struct {
@ -134,9 +134,9 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel stru
type DataSourceServingEndpointsEndpointsConfigServedEntities struct {
EntityName string `json:"entity_name,omitempty"`
EntityVersion string `json:"entity_version,omitempty"`
ExternalModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel `json:"external_model,omitempty"`
FoundationModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel `json:"foundation_model,omitempty"`
Name string `json:"name,omitempty"`
}
type DataSourceServingEndpointsEndpointsConfigServedModels struct {
@ -161,16 +161,16 @@ type DataSourceServingEndpointsEndpointsTags struct {
}
type DataSourceServingEndpointsEndpoints struct {
AiGateway []DataSourceServingEndpointsEndpointsAiGateway `json:"ai_gateway,omitempty"`
Config []DataSourceServingEndpointsEndpointsConfig `json:"config,omitempty"`
CreationTimestamp int `json:"creation_timestamp,omitempty"`
Creator string `json:"creator,omitempty"`
Id string `json:"id,omitempty"`
LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"`
Name string `json:"name,omitempty"`
State []DataSourceServingEndpointsEndpointsState `json:"state,omitempty"`
Tags []DataSourceServingEndpointsEndpointsTags `json:"tags,omitempty"`
Task string `json:"task,omitempty"`
}
type DataSourceServingEndpoints struct {

@ -3,6 +3,8 @@
package schema
type DataSources struct {
App map[string]any `json:"databricks_app,omitempty"`
Apps map[string]any `json:"databricks_apps,omitempty"`
AwsAssumeRolePolicy map[string]any `json:"databricks_aws_assume_role_policy,omitempty"`
AwsBucketPolicy map[string]any `json:"databricks_aws_bucket_policy,omitempty"`
AwsCrossaccountPolicy map[string]any `json:"databricks_aws_crossaccount_policy,omitempty"`
@ -66,6 +68,8 @@ type DataSources struct {
func NewDataSources() *DataSources {
return &DataSources{
App: make(map[string]any),
Apps: make(map[string]any),
AwsAssumeRolePolicy: make(map[string]any),
AwsBucketPolicy: make(map[string]any),
AwsCrossaccountPolicy: make(map[string]any),

@ -0,0 +1,102 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceAppActiveDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type ResourceAppActiveDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type ResourceAppActiveDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *ResourceAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *ResourceAppActiveDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type ResourceAppAppStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type ResourceAppComputeStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type ResourceAppPendingDeploymentDeploymentArtifacts struct {
SourceCodePath string `json:"source_code_path,omitempty"`
}
type ResourceAppPendingDeploymentStatus struct {
Message string `json:"message,omitempty"`
State string `json:"state,omitempty"`
}
type ResourceAppPendingDeployment struct {
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DeploymentArtifacts *ResourceAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"`
DeploymentId string `json:"deployment_id,omitempty"`
Mode string `json:"mode,omitempty"`
SourceCodePath string `json:"source_code_path,omitempty"`
Status *ResourceAppPendingDeploymentStatus `json:"status,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
}
type ResourceAppResourcesJob struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type ResourceAppResourcesSecret struct {
Key string `json:"key"`
Permission string `json:"permission"`
Scope string `json:"scope"`
}
type ResourceAppResourcesServingEndpoint struct {
Name string `json:"name"`
Permission string `json:"permission"`
}
type ResourceAppResourcesSqlWarehouse struct {
Id string `json:"id"`
Permission string `json:"permission"`
}
type ResourceAppResources struct {
Description string `json:"description,omitempty"`
Job *ResourceAppResourcesJob `json:"job,omitempty"`
Name string `json:"name"`
Secret *ResourceAppResourcesSecret `json:"secret,omitempty"`
ServingEndpoint *ResourceAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"`
SqlWarehouse *ResourceAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"`
}
type ResourceApp struct {
ActiveDeployment *ResourceAppActiveDeployment `json:"active_deployment,omitempty"`
AppStatus *ResourceAppAppStatus `json:"app_status,omitempty"`
ComputeStatus *ResourceAppComputeStatus `json:"compute_status,omitempty"`
CreateTime string `json:"create_time,omitempty"`
Creator string `json:"creator,omitempty"`
DefaultSourceCodePath string `json:"default_source_code_path,omitempty"`
Description string `json:"description,omitempty"`
Name string `json:"name"`
PendingDeployment *ResourceAppPendingDeployment `json:"pending_deployment,omitempty"`
Resources []ResourceAppResources `json:"resources,omitempty"`
ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"`
ServicePrincipalId int `json:"service_principal_id,omitempty"`
ServicePrincipalName string `json:"service_principal_name,omitempty"`
UpdateTime string `json:"update_time,omitempty"`
Updater string `json:"updater,omitempty"`
Url string `json:"url,omitempty"`
}
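
To show how the nested types compose (all names, IDs, and permission values below are hypothetical): repeated blocks such as resources are plain slices, while optional blocks are pointers, so an app with one SQL-warehouse resource is built like this.

package schema

// buildExampleApp is a hypothetical helper showing how the generated
// types nest: optional blocks are pointers, repeated blocks are slices.
func buildExampleApp() ResourceApp {
	return ResourceApp{
		Name:        "metrics-app",
		Description: "Serves team dashboards",
		Resources: []ResourceAppResources{{
			Name: "warehouse",
			SqlWarehouse: &ResourceAppResourcesSqlWarehouse{
				Id:         "abc123",
				Permission: "CAN_USE",
			},
		}},
	}
}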

@ -9,6 +9,7 @@ type ResourceAzureAdlsGen2Mount struct {
ClusterId string `json:"cluster_id,omitempty"`
ContainerName string `json:"container_name"`
Directory string `json:"directory,omitempty"`
Environment string `json:"environment,omitempty"`
Id string `json:"id,omitempty"`
InitializeFileSystem bool `json:"initialize_file_system"`
MountName string `json:"mount_name"`

@ -0,0 +1,52 @@
// Generated from Databricks Terraform provider schema. DO NOT EDIT.
package schema
type ResourceCredentialAwsIamRole struct {
ExternalId string `json:"external_id,omitempty"`
RoleArn string `json:"role_arn,omitempty"`
UnityCatalogIamArn string `json:"unity_catalog_iam_arn,omitempty"`
}
type ResourceCredentialAzureManagedIdentity struct {
AccessConnectorId string `json:"access_connector_id"`
CredentialId string `json:"credential_id,omitempty"`
ManagedIdentityId string `json:"managed_identity_id,omitempty"`
}
type ResourceCredentialAzureServicePrincipal struct {
ApplicationId string `json:"application_id"`
ClientSecret string `json:"client_secret"`
DirectoryId string `json:"directory_id"`
}
type ResourceCredentialDatabricksGcpServiceAccount struct {
CredentialId string `json:"credential_id,omitempty"`
Email string `json:"email,omitempty"`
PrivateKeyId string `json:"private_key_id,omitempty"`
}
type ResourceCredential struct {
Comment string `json:"comment,omitempty"`
CreatedAt int `json:"created_at,omitempty"`
CreatedBy string `json:"created_by,omitempty"`
CredentialId string `json:"credential_id,omitempty"`
ForceDestroy bool `json:"force_destroy,omitempty"`
ForceUpdate bool `json:"force_update,omitempty"`
FullName string `json:"full_name,omitempty"`
Id string `json:"id,omitempty"`
IsolationMode string `json:"isolation_mode,omitempty"`
MetastoreId string `json:"metastore_id,omitempty"`
Name string `json:"name"`
Owner string `json:"owner,omitempty"`
Purpose string `json:"purpose"`
ReadOnly bool `json:"read_only,omitempty"`
SkipValidation bool `json:"skip_validation,omitempty"`
UpdatedAt int `json:"updated_at,omitempty"`
UpdatedBy string `json:"updated_by,omitempty"`
UsedForManagedStorage bool `json:"used_for_managed_storage,omitempty"`
AwsIamRole *ResourceCredentialAwsIamRole `json:"aws_iam_role,omitempty"`
AzureManagedIdentity *ResourceCredentialAzureManagedIdentity `json:"azure_managed_identity,omitempty"`
AzureServicePrincipal *ResourceCredentialAzureServicePrincipal `json:"azure_service_principal,omitempty"`
DatabricksGcpServiceAccount *ResourceCredentialDatabricksGcpServiceAccount `json:"databricks_gcp_service_account,omitempty"`
}
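
A comparable sketch for the new credential resource (the name, purpose, and ARN are made up): Name and Purpose are required and therefore lack omitempty, and typically one cloud-specific block is attached by pointer, here an AWS IAM role.

package schema

// buildExampleCredential is a hypothetical helper: required fields are
// set directly, and the AWS-specific block is attached by pointer.
func buildExampleCredential() ResourceCredential {
	return ResourceCredential{
		Name:    "external-locations",
		Purpose: "STORAGE",
		AwsIamRole: &ResourceCredentialAwsIamRole{
			RoleArn: "arn:aws:iam::123456789012:role/example",
		},
	}
}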

Some files were not shown because too many files have changed in this diff.