From 54a470837ca0814955b4b1f8b41261645f4d546f Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Wed, 22 Jan 2025 14:28:13 +0100
Subject: [PATCH 01/39] Fix context propagation in bundle/deploy/terraform
 (#2208)

https://github.com/databricks/cli/pull/747#discussion_r1925248116
---
 bundle/deploy/terraform/init.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go
index cc9cd4415..6a014a7c1 100644
--- a/bundle/deploy/terraform/init.go
+++ b/bundle/deploy/terraform/init.go
@@ -54,7 +54,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con
 		return tf.ExecPath, nil
 	}
 
-	binDir, err := b.CacheDir(context.Background(), "bin")
+	binDir, err := b.CacheDir(ctx, "bin")
 	if err != nil {
 		return "", err
 	}

From 3d91691f25e85220646df5b9bd8a672e28b14e7f Mon Sep 17 00:00:00 2001
From: Gleb Kanterov <kanterov@users.noreply.github.com>
Date: Wed, 22 Jan 2025 16:37:37 +0100
Subject: [PATCH 02/39] PythonMutator: propagate source locations (#1783)

## Changes
Add a mechanism to load Python source locations in the Python mutator.
Previously, locations pointed to the generated YAML. Now, they point to the
Python sources instead. The Python process outputs "locations.json"
containing the locations of bundle paths, for example:

```json
{"path": "resources.jobs.job_0", "file": "resources/job_0.py", "line": 3, "column": 5}
{"path": "resources.jobs.job_0.tasks[0].task_key", "file": "resources/job_0.py", "line": 10, "column": 5}
{"path": "resources.jobs.job_1", "file": "resources/job_1.py", "line": 5, "column": 7}
```

Such locations form a tree, and we assign each `dyn.Value` the location of
its closest ancestor based on its path. For example,
`resources.jobs.job_0.tasks[0].task_key` is located at `job_0.py:10:5`,
and `resources.jobs.job_0.tasks[0].email_notifications` is located at
`job_0.py:3:5`, because the location of the job is the most precise
approximation available.
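
To illustrate the closest-ancestor lookup, here is a minimal, standalone sketch
(hypothetical `node`/`put`/`find` names, not the code in this patch; the actual
implementation operates on `dyn.Path` and distinguishes map keys from sequence
indexes, while the sketch treats index components as plain path parts):

```go
package main

import (
	"fmt"
	"strings"
)

// node is a trie keyed by path components ("resources", "jobs", "job_0", ...).
type node struct {
	children map[string]*node
	location string // e.g. "job_0.py:3:5"; empty means no location recorded
}

func newNode() *node { return &node{children: map[string]*node{}} }

// put records a location for the given dotted path.
func (n *node) put(path, location string) {
	for _, part := range strings.Split(path, ".") {
		child, ok := n.children[part]
		if !ok {
			child = newNode()
			n.children[part] = child
		}
		n = child
	}
	n.location = location
}

// find returns the location of the path, or of its closest ancestor.
func (n *node) find(path string) (string, bool) {
	best, found := n.location, n.location != ""
	for _, part := range strings.Split(path, ".") {
		child, ok := n.children[part]
		if !ok {
			break
		}
		n = child
		if n.location != "" {
			best, found = n.location, true
		}
	}
	return best, found
}

func main() {
	root := newNode()
	root.put("resources.jobs.job_0", "job_0.py:3:5")
	root.put("resources.jobs.job_0.tasks.0.task_key", "job_0.py:10:5")

	fmt.Println(root.find("resources.jobs.job_0.tasks.0.task_key"))            // exact match
	fmt.Println(root.find("resources.jobs.job_0.tasks.0.email_notifications")) // closest ancestor
}
```

Running this prints `job_0.py:10:5 true` and then `job_0.py:3:5 true`.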

This feature is only enabled if `experimental/python` is used.

Note: for now, we don't update locations for values containing relative
paths, because doing so would change how those paths are resolved.

## Example
```
% databricks bundle validate

Warning: job_cluster_key abc is not defined
  at resources.jobs.examples.tasks[0].job_cluster_key
  in resources/example.py:10:1
```

## Tests
Unit tests and manual testing
---
 .../mutator/python/python_diagnostics.go      |   1 +
 .../config/mutator/python/python_locations.go | 194 ++++++++++++++++++
 .../mutator/python/python_locations_test.go   | 179 ++++++++++++++++
 .../config/mutator/python/python_mutator.go   | 101 +++++++--
 .../mutator/python/python_mutator_test.go     |  79 +++++--
 5 files changed, 518 insertions(+), 36 deletions(-)
 create mode 100644 bundle/config/mutator/python/python_locations.go
 create mode 100644 bundle/config/mutator/python/python_locations_test.go

diff --git a/bundle/config/mutator/python/python_diagnostics.go b/bundle/config/mutator/python/python_diagnostics.go
index 12822065b..7a1e13b4e 100644
--- a/bundle/config/mutator/python/python_diagnostics.go
+++ b/bundle/config/mutator/python/python_diagnostics.go
@@ -9,6 +9,7 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 )
 
+// pythonDiagnostic is a single entry in diagnostics.json
 type pythonDiagnostic struct {
 	Severity pythonSeverity           `json:"severity"`
 	Summary  string                   `json:"summary"`
diff --git a/bundle/config/mutator/python/python_locations.go b/bundle/config/mutator/python/python_locations.go
new file mode 100644
index 000000000..2fa86bea0
--- /dev/null
+++ b/bundle/config/mutator/python/python_locations.go
@@ -0,0 +1,194 @@
+package python
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	"path/filepath"
+
+	"github.com/databricks/cli/libs/dyn"
+)
+
+// generatedFileName is used as the virtual file name for YAML generated by Python code.
+//
+// mergePythonLocations replaces dyn.Location values that refer to generatedFileName with
+// locations loaded from locations.json.
+const generatedFileName = "__generated_by_python__.yml"
+
+// pythonLocations is a data structure for efficient location lookup for a given path.
+//
+// Locations form a tree, and we assign each dyn.Value the location of its closest ancestor based on its path.
+// We implement it as a trie (prefix tree) where keys are components of the path. With that, lookups are O(n)
+// where n is the number of components in the path.
+//
+// For example, with locations.json:
+//
+//		{"path": "resources.jobs.job_0", "file": "resources/job_0.py", "line": 3, "column": 5}
+//		{"path": "resources.jobs.job_0.tasks[0].task_key", "file": "resources/job_0.py", "line": 10, "column": 5}
+//		{"path": "resources.jobs.job_1", "file": "resources/job_1.py", "line": 5, "column": 7}
+//
+//	- resources.jobs.job_0.tasks[0].task_key is located at job_0.py:10:5
+//
+//	- resources.jobs.job_0.tasks[0].email_notifications is located at job_0.py:3:5,
+//	  because we use the location of the job as the most precise approximation.
+//
+// See pythonLocationEntry for the structure of a single entry in locations.json
+type pythonLocations struct {
+	// descendants referenced by key, e.g. '.foo'
+	keys map[string]*pythonLocations
+
+	// descendants referenced by index, e.g. '[0]'
+	indexes map[int]*pythonLocations
+
+	// location for the current node if it exists
+	location dyn.Location
+
+	// if true, location is present
+	exists bool
+}
+
+// pythonLocationEntry is a single entry in locations.json
+type pythonLocationEntry struct {
+	Path   string `json:"path"`
+	File   string `json:"file"`
+	Line   int    `json:"line"`
+	Column int    `json:"column"`
+}
+
+// mergePythonLocations applies locations from the Python mutator to the given dyn.Value.
+//
+// The primary use-case is to merge locations.json with output.json, so that any
+// validation errors will point to Python source code instead of generated YAML.
+func mergePythonLocations(value dyn.Value, locations *pythonLocations) (dyn.Value, error) {
+	return dyn.Walk(value, func(path dyn.Path, value dyn.Value) (dyn.Value, error) {
+		newLocation, ok := findPythonLocation(locations, path)
+		if !ok {
+			return value, nil
+		}
+
+		// The first item in the list is the "last" location used for error reporting.
+		//
+		// Loaded YAML uses the virtual file path as its location; we remove such references
+		// because they should be replaced by 'newLocation'.
+		//
+		// We preserve any other non-virtual locations for the case where a Python function
+		// modified a resource defined in YAML.
+		newLocations := append(
+			[]dyn.Location{newLocation},
+			removeVirtualLocations(value.Locations())...,
+		)
+
+		return value.WithLocations(newLocations), nil
+	})
+}
+
+func removeVirtualLocations(locations []dyn.Location) []dyn.Location {
+	var newLocations []dyn.Location
+
+	for _, location := range locations {
+		if filepath.Base(location.File) == generatedFileName {
+			continue
+		}
+
+		newLocations = append(newLocations, location)
+	}
+
+	return newLocations
+}
+
+// parsePythonLocations parses locations.json from the Python mutator.
+//
+// The locations file contains newline-separated JSON objects with the pythonLocationEntry structure.
+func parsePythonLocations(input io.Reader) (*pythonLocations, error) {
+	decoder := json.NewDecoder(input)
+	locations := newPythonLocations()
+
+	for decoder.More() {
+		var entry pythonLocationEntry
+
+		err := decoder.Decode(&entry)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse python location: %s", err)
+		}
+
+		path, err := dyn.NewPathFromString(entry.Path)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse python location: %s", err)
+		}
+
+		location := dyn.Location{
+			File:   entry.File,
+			Line:   entry.Line,
+			Column: entry.Column,
+		}
+
+		putPythonLocation(locations, path, location)
+	}
+
+	return locations, nil
+}
+
+// putPythonLocation puts the location into the trie for the given path
+func putPythonLocation(trie *pythonLocations, path dyn.Path, location dyn.Location) {
+	currentNode := trie
+
+	for _, component := range path {
+		if key := component.Key(); key != "" {
+			if _, ok := currentNode.keys[key]; !ok {
+				currentNode.keys[key] = newPythonLocations()
+			}
+
+			currentNode = currentNode.keys[key]
+		} else {
+			index := component.Index()
+			if _, ok := currentNode.indexes[index]; !ok {
+				currentNode.indexes[index] = newPythonLocations()
+			}
+
+			currentNode = currentNode.indexes[index]
+		}
+	}
+
+	currentNode.location = location
+	currentNode.exists = true
+}
+
+// newPythonLocations creates a new trie node
+func newPythonLocations() *pythonLocations {
+	return &pythonLocations{
+		keys:    make(map[string]*pythonLocations),
+		indexes: make(map[int]*pythonLocations),
+	}
+}
+
+// findPythonLocation finds the location, or the closest ancestor location, in the trie for the given path.
+// If neither an exact nor an ancestor location is found, false is returned.
+func findPythonLocation(locations *pythonLocations, path dyn.Path) (dyn.Location, bool) {
+	currentNode := locations
+	lastLocation := locations.location
+	exists := locations.exists
+
+	for _, component := range path {
+		if key := component.Key(); key != "" {
+			if _, ok := currentNode.keys[key]; !ok {
+				break
+			}
+
+			currentNode = currentNode.keys[key]
+		} else {
+			index := component.Index()
+			if _, ok := currentNode.indexes[index]; !ok {
+				break
+			}
+
+			currentNode = currentNode.indexes[index]
+		}
+
+		if currentNode.exists {
+			lastLocation = currentNode.location
+			exists = true
+		}
+	}
+
+	return lastLocation, exists
+}
diff --git a/bundle/config/mutator/python/python_locations_test.go b/bundle/config/mutator/python/python_locations_test.go
new file mode 100644
index 000000000..32afcc92b
--- /dev/null
+++ b/bundle/config/mutator/python/python_locations_test.go
@@ -0,0 +1,179 @@
+package python
+
+import (
+	"bytes"
+	"path/filepath"
+	"testing"
+
+	"github.com/databricks/cli/libs/diag"
+	"github.com/stretchr/testify/require"
+
+	"github.com/databricks/cli/libs/dyn"
+	assert "github.com/databricks/cli/libs/dyn/dynassert"
+)
+
+func TestMergeLocations(t *testing.T) {
+	pythonLocation := dyn.Location{File: "foo.py", Line: 1, Column: 1}
+	generatedLocation := dyn.Location{File: generatedFileName, Line: 1, Column: 1}
+	yamlLocation := dyn.Location{File: "foo.yml", Line: 1, Column: 1}
+
+	locations := newPythonLocations()
+	putPythonLocation(locations, dyn.MustPathFromString("foo"), pythonLocation)
+
+	input := dyn.NewValue(
+		map[string]dyn.Value{
+			"foo": dyn.NewValue(
+				map[string]dyn.Value{
+					"baz": dyn.NewValue("baz", []dyn.Location{yamlLocation}),
+					"qux": dyn.NewValue("baz", []dyn.Location{generatedLocation, yamlLocation}),
+				},
+				[]dyn.Location{},
+			),
+			"bar": dyn.NewValue("baz", []dyn.Location{generatedLocation}),
+		},
+		[]dyn.Location{yamlLocation},
+	)
+
+	expected := dyn.NewValue(
+		map[string]dyn.Value{
+			"foo": dyn.NewValue(
+				map[string]dyn.Value{
+					// pythonLocation is prepended to the list if absent
+					"baz": dyn.NewValue("baz", []dyn.Location{pythonLocation, yamlLocation}),
+					// generatedLocation is replaced by pythonLocation
+					"qux": dyn.NewValue("baz", []dyn.Location{pythonLocation, yamlLocation}),
+				},
+				[]dyn.Location{pythonLocation},
+			),
+			// if location is unknown, we keep it as-is
+			"bar": dyn.NewValue("baz", []dyn.Location{generatedLocation}),
+		},
+		[]dyn.Location{yamlLocation},
+	)
+
+	actual, err := mergePythonLocations(input, locations)
+
+	assert.NoError(t, err)
+	assert.Equal(t, expected, actual)
+}
+
+func TestFindLocation(t *testing.T) {
+	location0 := dyn.Location{File: "foo.py", Line: 1, Column: 1}
+	location1 := dyn.Location{File: "foo.py", Line: 2, Column: 1}
+
+	locations := newPythonLocations()
+	putPythonLocation(locations, dyn.MustPathFromString("foo"), location0)
+	putPythonLocation(locations, dyn.MustPathFromString("foo.bar"), location1)
+
+	actual, exists := findPythonLocation(locations, dyn.MustPathFromString("foo.bar"))
+
+	assert.True(t, exists)
+	assert.Equal(t, location1, actual)
+}
+
+func TestFindLocation_indexPathComponent(t *testing.T) {
+	location0 := dyn.Location{File: "foo.py", Line: 1, Column: 1}
+	location1 := dyn.Location{File: "foo.py", Line: 2, Column: 1}
+	location2 := dyn.Location{File: "foo.py", Line: 3, Column: 1}
+
+	locations := newPythonLocations()
+	putPythonLocation(locations, dyn.MustPathFromString("foo"), location0)
+	putPythonLocation(locations, dyn.MustPathFromString("foo.bar"), location1)
+	putPythonLocation(locations, dyn.MustPathFromString("foo.bar[0]"), location2)
+
+	actual, exists := findPythonLocation(locations, dyn.MustPathFromString("foo.bar[0]"))
+
+	assert.True(t, exists)
+	assert.Equal(t, location2, actual)
+}
+
+func TestFindLocation_closestAncestorLocation(t *testing.T) {
+	location0 := dyn.Location{File: "foo.py", Line: 1, Column: 1}
+	location1 := dyn.Location{File: "foo.py", Line: 2, Column: 1}
+
+	locations := newPythonLocations()
+	putPythonLocation(locations, dyn.MustPathFromString("foo"), location0)
+	putPythonLocation(locations, dyn.MustPathFromString("foo.bar"), location1)
+
+	actual, exists := findPythonLocation(locations, dyn.MustPathFromString("foo.bar.baz"))
+
+	assert.True(t, exists)
+	assert.Equal(t, location1, actual)
+}
+
+func TestFindLocation_unknownLocation(t *testing.T) {
+	location0 := dyn.Location{File: "foo.py", Line: 1, Column: 1}
+	location1 := dyn.Location{File: "foo.py", Line: 2, Column: 1}
+
+	locations := newPythonLocations()
+	putPythonLocation(locations, dyn.MustPathFromString("foo"), location0)
+	putPythonLocation(locations, dyn.MustPathFromString("foo.bar"), location1)
+
+	_, exists := findPythonLocation(locations, dyn.MustPathFromString("bar"))
+
+	assert.False(t, exists)
+}
+
+func TestLoadOutput(t *testing.T) {
+	location := dyn.Location{File: "my_job.py", Line: 1, Column: 1}
+	bundleRoot := t.TempDir()
+	output := `{
+		"resources": {
+			"jobs": {
+				"my_job": {
+					"name": "my_job",
+					"tasks": [
+						{
+							"task_key": "my_task",
+							"notebook_task": {
+								"notebook_path": "my_notebook"
+							}
+						}
+					]
+				}
+			}
+		}
+	}`
+
+	locations := newPythonLocations()
+	putPythonLocation(
+		locations,
+		dyn.MustPathFromString("resources.jobs.my_job"),
+		location,
+	)
+
+	value, diags := loadOutput(
+		bundleRoot,
+		bytes.NewReader([]byte(output)),
+		locations,
+	)
+
+	assert.Equal(t, diag.Diagnostics{}, diags)
+
+	name, err := dyn.Get(value, "resources.jobs.my_job.name")
+	require.NoError(t, err)
+	require.Equal(t, []dyn.Location{location}, name.Locations())
+
+	// until we implement path normalization, we have to keep the locations of values
+	// whose semantics change depending on their location
+	//
+	// note: it's important to use an absolute path that includes 'bundleRoot',
+	// because the mutator pipeline has already expanded locations into absolute paths
+	notebookPath, err := dyn.Get(value, "resources.jobs.my_job.tasks[0].notebook_task.notebook_path")
+	require.NoError(t, err)
+	require.Len(t, notebookPath.Locations(), 1)
+	require.Equal(t, filepath.Join(bundleRoot, generatedFileName), notebookPath.Locations()[0].File)
+}
+
+func TestParsePythonLocations(t *testing.T) {
+	expected := dyn.Location{File: "foo.py", Line: 1, Column: 2}
+
+	input := `{"path": "foo", "file": "foo.py", "line": 1, "column": 2}`
+	reader := bytes.NewReader([]byte(input))
+	locations, err := parsePythonLocations(reader)
+
+	assert.NoError(t, err)
+
+	assert.True(t, locations.keys["foo"].exists)
+	assert.Equal(t, expected, locations.keys["foo"].location)
+}
diff --git a/bundle/config/mutator/python/python_mutator.go b/bundle/config/mutator/python/python_mutator.go
index 8009ab243..cd2e286e5 100644
--- a/bundle/config/mutator/python/python_mutator.go
+++ b/bundle/config/mutator/python/python_mutator.go
@@ -7,11 +7,14 @@ import (
 	"errors"
 	"fmt"
 	"io"
+	"io/fs"
 	"os"
 	"path/filepath"
 	"reflect"
 	"strings"
 
+	"github.com/databricks/cli/bundle/config/mutator/paths"
+
 	"github.com/databricks/databricks-sdk-go/logger"
 	"github.com/fatih/color"
 
@@ -124,6 +127,15 @@ type opts struct {
 	enabled bool
 
 	venvPath string
+
+	loadLocations bool
+}
+
+type runPythonMutatorOpts struct {
+	cacheDir       string
+	bundleRootPath string
+	pythonPath     string
+	loadLocations  bool
 }
 
 // getOpts adapts deprecated PyDABs and upcoming Python configuration
@@ -148,8 +160,9 @@ func getOpts(b *bundle.Bundle, phase phase) (opts, error) {
 		// don't execute for phases for 'python' section
 		if phase == PythonMutatorPhaseInit || phase == PythonMutatorPhaseLoad {
 			return opts{
-				enabled:  true,
-				venvPath: experimental.PyDABs.VEnvPath,
+				enabled:       true,
+				venvPath:      experimental.PyDABs.VEnvPath,
+				loadLocations: false, // not supported in PyDABs
 			}, nil
 		} else {
 			return opts{}, nil
@@ -158,8 +171,9 @@ func getOpts(b *bundle.Bundle, phase phase) (opts, error) {
 		// don't execute for phases for 'pydabs' section
 		if phase == PythonMutatorPhaseLoadResources || phase == PythonMutatorPhaseApplyMutators {
 			return opts{
-				enabled:  true,
-				venvPath: experimental.Python.VEnvPath,
+				enabled:       true,
+				venvPath:      experimental.Python.VEnvPath,
+				loadLocations: true,
 			}, nil
 		} else {
 			return opts{}, nil
@@ -194,7 +208,12 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
 			return dyn.InvalidValue, fmt.Errorf("failed to create cache dir: %w", err)
 		}
 
-		rightRoot, diags := m.runPythonMutator(ctx, cacheDir, b.BundleRootPath, pythonPath, leftRoot)
+		rightRoot, diags := m.runPythonMutator(ctx, leftRoot, runPythonMutatorOpts{
+			cacheDir:       cacheDir,
+			bundleRootPath: b.BundleRootPath,
+			pythonPath:     pythonPath,
+			loadLocations:  opts.loadLocations,
+		})
 		mutateDiags = diags
 		if diags.HasError() {
 			return dyn.InvalidValue, mutateDiagsHasError
@@ -238,13 +257,14 @@ func createCacheDir(ctx context.Context) (string, error) {
 	return os.MkdirTemp("", "-python")
 }
 
-func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
-	inputPath := filepath.Join(cacheDir, "input.json")
-	outputPath := filepath.Join(cacheDir, "output.json")
-	diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
+func (m *pythonMutator) runPythonMutator(ctx context.Context, root dyn.Value, opts runPythonMutatorOpts) (dyn.Value, diag.Diagnostics) {
+	inputPath := filepath.Join(opts.cacheDir, "input.json")
+	outputPath := filepath.Join(opts.cacheDir, "output.json")
+	diagnosticsPath := filepath.Join(opts.cacheDir, "diagnostics.json")
+	locationsPath := filepath.Join(opts.cacheDir, "locations.json")
 
 	args := []string{
-		pythonPath,
+		opts.pythonPath,
 		"-m",
 		"databricks.bundles.build",
 		"--phase",
@@ -257,6 +277,10 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath
 		diagnosticsPath,
 	}
 
+	if opts.loadLocations {
+		args = append(args, "--locations", locationsPath)
+	}
+
 	if err := writeInputFile(inputPath, root); err != nil {
 		return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
 	}
@@ -271,7 +295,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath
 	_, processErr := process.Background(
 		ctx,
 		args,
-		process.WithDir(rootPath),
+		process.WithDir(opts.bundleRootPath),
 		process.WithStderrWriter(stderrWriter),
 		process.WithStdoutWriter(stdoutWriter),
 	)
@@ -307,7 +331,12 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath
 		return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
 	}
 
-	output, outputDiags := loadOutputFile(rootPath, outputPath)
+	locations, err := loadLocationsFile(locationsPath)
+	if err != nil {
+		return dyn.InvalidValue, diag.Errorf("failed to load locations: %s", err)
+	}
+
+	output, outputDiags := loadOutputFile(opts.bundleRootPath, outputPath, locations)
 	pythonDiagnostics = pythonDiagnostics.Extend(outputDiags)
 
 	// we pass through pythonDiagnostic because it contains warnings
@@ -351,7 +380,21 @@ func writeInputFile(inputPath string, input dyn.Value) error {
 	return os.WriteFile(inputPath, rootConfigJson, 0o600)
 }
 
-func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
+// loadLocationsFile loads locations.json containing source locations for generated YAML.
+func loadLocationsFile(locationsPath string) (*pythonLocations, error) {
+	locationsFile, err := os.Open(locationsPath)
+	if errors.Is(err, fs.ErrNotExist) {
+		return newPythonLocations(), nil
+	} else if err != nil {
+		return nil, fmt.Errorf("failed to open locations file: %w", err)
+	}
+
+	defer locationsFile.Close()
+
+	return parsePythonLocations(locationsFile)
+}
+
+func loadOutputFile(rootPath, outputPath string, locations *pythonLocations) (dyn.Value, diag.Diagnostics) {
 	outputFile, err := os.Open(outputPath)
 	if err != nil {
 		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
@@ -359,15 +402,19 @@ func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
 
 	defer outputFile.Close()
 
+	return loadOutput(rootPath, outputFile, locations)
+}
+
+func loadOutput(rootPath string, outputFile io.Reader, locations *pythonLocations) (dyn.Value, diag.Diagnostics) {
 	// we need absolute path because later parts of pipeline assume all paths are absolute
 	// and this file will be used as location to resolve relative paths.
 	//
-	// virtualPath has to stay in rootPath, because locations outside root path are not allowed:
+	// virtualPath has to stay in bundleRootPath, because locations outside root path are not allowed:
 	//
 	//   Error: path /var/folders/.../python/dist/*.whl is not contained in bundle root path
 	//
 	// for that, we pass virtualPath instead of outputPath as file location
-	virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_python__.yml"))
+	virtualPath, err := filepath.Abs(filepath.Join(rootPath, generatedFileName))
 	if err != nil {
 		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
 	}
@@ -377,7 +424,29 @@ func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
 		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse output file: %w", err))
 	}
 
-	return strictNormalize(config.Root{}, generated)
+	// paths are resolved relative to the locations of their values; if we change a location,
+	// we have to update each path. Until we simplify that, we don't update locations
+	// for such values, so we don't change how paths are resolved
+	//
+	// we can remove this once we:
+	// - add variable interpolation before and after PythonMutator
+	// - implement path normalization (aka path normal form)
+	_, err = paths.VisitJobPaths(generated, func(p dyn.Path, kind paths.PathKind, v dyn.Value) (dyn.Value, error) {
+		putPythonLocation(locations, p, v.Location())
+		return v, nil
+	})
+	if err != nil {
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to update locations: %w", err))
+	}
+
+	// 'generated' has dyn.Location values as if it came from the generated YAML file;
+	// earlier we loaded locations.json with source locations in the Python code
+	generatedWithLocations, err := mergePythonLocations(generated, locations)
+	if err != nil {
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to update locations: %w", err))
+	}
+
+	return strictNormalize(config.Root{}, generatedWithLocations)
 }
 
 func strictNormalize(dst any, generated dyn.Value) (dyn.Value, diag.Diagnostics) {
diff --git a/bundle/config/mutator/python/python_mutator_test.go b/bundle/config/mutator/python/python_mutator_test.go
index d51572c8a..322fb79e8 100644
--- a/bundle/config/mutator/python/python_mutator_test.go
+++ b/bundle/config/mutator/python/python_mutator_test.go
@@ -7,7 +7,6 @@ import (
 	"os"
 	"os/exec"
 	"path/filepath"
-	"reflect"
 	"runtime"
 	"testing"
 
@@ -93,6 +92,8 @@ func TestPythonMutator_loadResources(t *testing.T) {
 			}
 		}`,
 		`{"severity": "warning", "summary": "job doesn't have any tasks", "location": {"file": "src/examples/file.py", "line": 10, "column": 5}}`,
+		`{"path": "resources.jobs.job0", "file": "src/examples/job0.py", "line": 3, "column": 5}
+		{"path": "resources.jobs.job1", "file": "src/examples/job1.py", "line": 5, "column": 7}`,
 	)
 
 	mutator := PythonMutator(PythonMutatorPhaseLoadResources)
@@ -110,6 +111,25 @@ func TestPythonMutator_loadResources(t *testing.T) {
 		assert.Equal(t, "job_1", job1.Name)
 	}
 
+	// locations from locations.json should be applied to the underlying dyn.Value
+	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
+		name1, err := dyn.GetByPath(v, dyn.MustPathFromString("resources.jobs.job1.name"))
+		if err != nil {
+			return dyn.InvalidValue, err
+		}
+
+		assert.Equal(t, []dyn.Location{
+			{
+				File:   "src/examples/job1.py",
+				Line:   5,
+				Column: 7,
+			},
+		}, name1.Locations())
+
+		return v, nil
+	})
+	assert.NoError(t, err)
+
 	assert.Equal(t, 1, len(diags))
 	assert.Equal(t, "job doesn't have any tasks", diags[0].Summary)
 	assert.Equal(t, []dyn.Location{
@@ -157,7 +177,7 @@ func TestPythonMutator_loadResources_disallowed(t *testing.T) {
 					}
 				}
 			}
-		}`, "")
+		}`, "", "")
 
 	mutator := PythonMutator(PythonMutatorPhaseLoadResources)
 	diag := bundle.Apply(ctx, b, mutator)
@@ -202,7 +222,7 @@ func TestPythonMutator_applyMutators(t *testing.T) {
 					}
 				}
 			}
-		}`, "")
+		}`, "", "")
 
 	mutator := PythonMutator(PythonMutatorPhaseApplyMutators)
 	diag := bundle.Apply(ctx, b, mutator)
@@ -224,7 +244,7 @@ func TestPythonMutator_applyMutators(t *testing.T) {
 		description, err := dyn.GetByPath(v, dyn.MustPathFromString("resources.jobs.job0.description"))
 		require.NoError(t, err)
 
-		expectedVirtualPath, err := filepath.Abs("__generated_by_python__.yml")
+		expectedVirtualPath, err := filepath.Abs(generatedFileName)
 		require.NoError(t, err)
 		assert.Equal(t, expectedVirtualPath, description.Location().File)
 
@@ -263,7 +283,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
 					}
 				}
 			}
-		}`, "")
+		}`, "", "")
 
 	mutator := PythonMutator(PythonMutatorPhaseLoadResources)
 	diag := bundle.Apply(ctx, b, mutator)
@@ -312,7 +332,7 @@ func TestGetOps_Python(t *testing.T) {
 	}, PythonMutatorPhaseLoadResources)
 
 	assert.NoError(t, err)
-	assert.Equal(t, opts{venvPath: ".venv", enabled: true}, actual)
+	assert.Equal(t, opts{venvPath: ".venv", enabled: true, loadLocations: true}, actual)
 }
 
 func TestGetOps_PyDABs(t *testing.T) {
@@ -328,7 +348,7 @@ func TestGetOps_PyDABs(t *testing.T) {
 	}, PythonMutatorPhaseInit)
 
 	assert.NoError(t, err)
-	assert.Equal(t, opts{venvPath: ".venv", enabled: true}, actual)
+	assert.Equal(t, opts{venvPath: ".venv", enabled: true, loadLocations: false}, actual)
 }
 
 func TestGetOps_empty(t *testing.T) {
@@ -661,7 +681,7 @@ or activate the environment before running CLI commands:
 	assert.Equal(t, expected, out)
 }
 
-func withProcessStub(t *testing.T, args []string, output, diagnostics string) context.Context {
+func withProcessStub(t *testing.T, args []string, output, diagnostics, locations string) context.Context {
 	ctx := context.Background()
 	ctx, stub := process.WithStub(ctx)
 
@@ -673,32 +693,51 @@ func withProcessStub(t *testing.T, args []string, output, diagnostics string) co
 
 	inputPath := filepath.Join(cacheDir, "input.json")
 	outputPath := filepath.Join(cacheDir, "output.json")
+	locationsPath := filepath.Join(cacheDir, "locations.json")
 	diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
 
-	args = append(args, "--input", inputPath)
-	args = append(args, "--output", outputPath)
-	args = append(args, "--diagnostics", diagnosticsPath)
-
 	stub.WithCallback(func(actual *exec.Cmd) error {
 		_, err := os.Stat(inputPath)
 		assert.NoError(t, err)
 
-		if reflect.DeepEqual(actual.Args, args) {
-			err := os.WriteFile(outputPath, []byte(output), 0o600)
-			require.NoError(t, err)
+		actualInputPath := getArg(actual.Args, "--input")
+		actualOutputPath := getArg(actual.Args, "--output")
+		actualDiagnosticsPath := getArg(actual.Args, "--diagnostics")
+		actualLocationsPath := getArg(actual.Args, "--locations")
 
-			err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0o600)
-			require.NoError(t, err)
+		require.Equal(t, inputPath, actualInputPath)
+		require.Equal(t, outputPath, actualOutputPath)
+		require.Equal(t, diagnosticsPath, actualDiagnosticsPath)
 
-			return nil
-		} else {
-			return fmt.Errorf("unexpected command: %v", actual.Args)
+		// locations is an optional argument
+		if locations != "" {
+			require.Equal(t, locationsPath, actualLocationsPath)
+
+			err = os.WriteFile(locationsPath, []byte(locations), 0o600)
+			require.NoError(t, err)
 		}
+
+		err = os.WriteFile(outputPath, []byte(output), 0o600)
+		require.NoError(t, err)
+
+		err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0o600)
+		require.NoError(t, err)
+
+		return nil
 	})
 
 	return ctx
 }
 
+func getArg(args []string, name string) string {
+	for i := range args {
+		if args[i] == name {
+			return args[i+1]
+		}
+	}
+	return ""
+}
+
 func loadYaml(name, content string) *bundle.Bundle {
 	v, diag := config.LoadFromBytes(name, []byte(content))
 

From 20c1902a4515ea464d03f6b9a8c2e7ede94e0ab4 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Wed, 22 Jan 2025 17:26:16 +0100
Subject: [PATCH 03/39] Fix passing SingleTest to TestAccept (#2210)

---
 acceptance/acceptance_test.go | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 9a4564ffa..e611f4e50 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -28,8 +28,8 @@ var KeepTmp bool
 
 // In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
 // Then install your breakpoints and click "debug test" near TestAccept in VSCODE.
-// example: var singleTest = "bundle/variables/empty"
-var singleTest = ""
+// example: var SingleTest = "bundle/variables/empty"
+var SingleTest = ""
 
 // If enabled, instead of compiling and running CLI externally, we'll start in-process server that accepts and runs
 // CLI commands. The $CLI in test scripts is a helper that just forwards command-line arguments to this server (see bin/callserver.py).
@@ -37,7 +37,7 @@ var singleTest = ""
 var InprocessMode bool
 
 func init() {
-	flag.BoolVar(&InprocessMode, "inprocess", singleTest != "", "Run CLI in the same process as test (for debugging)")
+	flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
 	flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
 }
 
@@ -54,7 +54,7 @@ var Scripts = map[string]bool{
 }
 
 func TestAccept(t *testing.T) {
-	testAccept(t, InprocessMode, "")
+	testAccept(t, InprocessMode, SingleTest)
 }
 
 func TestInprocessMode(t *testing.T) {

From ba3a400327833caa822c8a0416808b072c86c264 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Thu, 23 Jan 2025 11:59:01 +0100
Subject: [PATCH 04/39] Remove test-specific logic from generic test runner
 (#2215)

Revert the changes to acceptance_test.go added in #2177 and add a
test-specific fix.
---
 acceptance/acceptance_test.go                 | 23 -------------------
 .../experimental-jobs-as-code/script          |  2 ++
 2 files changed, 2 insertions(+), 23 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index e611f4e50..56db6ec20 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -9,7 +9,6 @@ import (
 	"os"
 	"os/exec"
 	"path/filepath"
-	"regexp"
 	"runtime"
 	"slices"
 	"sort"
@@ -452,16 +451,6 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
 }
 
 func ListDir(t *testing.T, src string) ([]string, error) {
-	// exclude folders in .gitignore from comparison
-	ignored := []string{
-		"\\.ruff_cache",
-		"\\.venv",
-		".*\\.egg-info",
-		"__pycache__",
-		// depends on uv version
-		"uv.lock",
-	}
-
 	var files []string
 	err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
 		if err != nil {
@@ -469,19 +458,7 @@ func ListDir(t *testing.T, src string) ([]string, error) {
 		}
 
 		if info.IsDir() {
-			for _, ignoredFolder := range ignored {
-				if matched, _ := regexp.MatchString(ignoredFolder, info.Name()); matched {
-					return filepath.SkipDir
-				}
-			}
-
 			return nil
-		} else {
-			for _, ignoredFolder := range ignored {
-				if matched, _ := regexp.MatchString(ignoredFolder, info.Name()); matched {
-					return nil
-				}
-			}
 		}
 
 		relPath, err := filepath.Rel(src, path)
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/script b/acceptance/bundle/templates/experimental-jobs-as-code/script
index 2209aa7ab..af28b9d0a 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/script
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/script
@@ -10,3 +10,5 @@ cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new
 mv databricks.yml.new databricks.yml
 
 trace $CLI bundle validate -t dev --output json | jq ".resources"
+
+rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info

From f60ad32f07241b311192c1476e32ef8656e3c6f2 Mon Sep 17 00:00:00 2001
From: Ilya Kuznetsov <ilya.kuznetsov@databricks.com>
Date: Thu, 23 Jan 2025 12:11:44 +0100
Subject: [PATCH 05/39] Allow yaml-anchors in schema (#2200)

## Changes

Allows custom untyped fields in the root config in the JSON schema so that
editors don't highlight errors when using YAML anchors.

Example use case:

```
tags: &job-tags
  environment: ${bundle.target}


resources:
  jobs:
    db1:
      tags:
        <<: *job-tags
    db2:
      tags:
        <<: *job-tags
```

One downside is that unknown top-level properties are no longer
highlighted (but they will still fail during CLI validation).

## Tests

Manually checked the behavior in VSCode: it no longer shows a validation
error for anchors, and other typed properties are still suggested.
---
 bundle/internal/schema/main.go                        |  9 +++++++++
 .../schema/testdata/fail/unknown_top_level_field.yml  |  1 -
 bundle/internal/schema/testdata/pass/yaml_anchors.yml | 11 +++++++++++
 bundle/schema/jsonschema.json                         |  2 +-
 4 files changed, 21 insertions(+), 2 deletions(-)
 delete mode 100644 bundle/internal/schema/testdata/fail/unknown_top_level_field.yml
 create mode 100644 bundle/internal/schema/testdata/pass/yaml_anchors.yml

diff --git a/bundle/internal/schema/main.go b/bundle/internal/schema/main.go
index 39b859656..38e099ece 100644
--- a/bundle/internal/schema/main.go
+++ b/bundle/internal/schema/main.go
@@ -172,6 +172,15 @@ func generateSchema(workdir, outputFile string) {
 		a.addAnnotations,
 		addInterpolationPatterns,
 	})
+
+	// AdditionalProperties is set to an empty schema to allow untyped top-level keys used as YAML anchors
+	// Example:
+	// some_anchor: &some_anchor
+	//   file_path: /some/path/
+	// workspace:
+	//   <<: *some_anchor
+	s.AdditionalProperties = jsonschema.Schema{}
+
 	if err != nil {
 		log.Fatal(err)
 	}
diff --git a/bundle/internal/schema/testdata/fail/unknown_top_level_field.yml b/bundle/internal/schema/testdata/fail/unknown_top_level_field.yml
deleted file mode 100644
index e8a8866bc..000000000
--- a/bundle/internal/schema/testdata/fail/unknown_top_level_field.yml
+++ /dev/null
@@ -1 +0,0 @@
-unknown: value
diff --git a/bundle/internal/schema/testdata/pass/yaml_anchors.yml b/bundle/internal/schema/testdata/pass/yaml_anchors.yml
new file mode 100644
index 000000000..18749891d
--- /dev/null
+++ b/bundle/internal/schema/testdata/pass/yaml_anchors.yml
@@ -0,0 +1,11 @@
+tags: &job-tags
+  environment: "some_environment"
+
+resources:
+  jobs:
+    db1:
+      tags:
+        <<: *job-tags
+    db2:
+      tags:
+        <<: *job-tags
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index b3158792c..4a3b56814 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -7269,5 +7269,5 @@
       "$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
     }
   },
-  "additionalProperties": false
+  "additionalProperties": {}
 }
\ No newline at end of file

From 798189eb96bc1184119dc039a2728f87b4ce6212 Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Thu, 23 Jan 2025 12:17:52 +0100
Subject: [PATCH 06/39] Upgrade Go SDK to 0.56.0 (#2214)

## Changes

Upgrade Go SDK to 0.56.0

Relevant changes:
- Support Query parameters for all HTTP operations
(https://github.com/databricks/databricks-sdk-go/pull/1124).
---
 .codegen/_openapi_sha                         |   2 +-
 .codegen/service.go.tmpl                      |  20 +-
 .gitattributes                                |   1 +
 bundle/deploy/terraform/convert_test.go       |   4 +-
 .../convert_model_serving_endpoint_test.go    |   2 +-
 .../internal/schema/annotations_openapi.yml   | 367 +++++++++++-------
 .../schema/annotations_openapi_overrides.yml  |  11 +
 bundle/schema/jsonschema.json                 | 175 +++++----
 .../custom-app-integration.go                 |   1 +
 cmd/api/api.go                                |   2 +-
 .../access-control/access-control.go          | 109 ++++++
 cmd/workspace/cmd.go                          |   2 +
 cmd/workspace/providers/providers.go          |   4 +-
 cmd/workspace/recipients/recipients.go        |  96 ++---
 .../serving-endpoints/serving-endpoints.go    | 111 +++++-
 go.mod                                        |   2 +-
 go.sum                                        |   4 +-
 integration/cmd/sync/sync_test.go             |   2 +-
 libs/filer/files_client.go                    |   4 +-
 libs/filer/workspace_files_client.go          |   5 +-
 .../workspace_files_extensions_client_test.go |   2 +-
 libs/git/info.go                              |   1 +
 22 files changed, 588 insertions(+), 339 deletions(-)
 create mode 100755 cmd/workspace/access-control/access-control.go

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index dfe78790a..588cf9d63 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-779817ed8d63031f5ea761fbd25ee84f38feec0d
\ No newline at end of file
+0be1b914249781b5e903b7676fd02255755bc851
\ No newline at end of file
diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl
index 0c9fa089a..2f4987b13 100644
--- a/.codegen/service.go.tmpl
+++ b/.codegen/service.go.tmpl
@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
 	{{- end }}
 )
 
+{{- $excludeFromJson := list "http-request"}}
+
 func new{{.PascalName}}() *cobra.Command {
 	cmd := &cobra.Command{}
 
+	{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 	{{- if .Request}}
 
 	var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 	{{- if .RequestBodyField }}
 	{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 	{{- end }}
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	var {{.CamelName}}Json flags.JsonFlag
 	{{- end}}
 	{{- end}}
@@ -135,7 +138,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $request = .RequestBodyField.Entity -}}
 	{{- end -}}
     {{if $request }}// TODO: short flags
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 	{{- end}}
     {{$method := .}}
@@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $hasRequiredArgs :=  and (not $hasIdPrompt) $hasPosArgs -}}
 	{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields))  -}}
 	{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 	{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
 
 	{{- $atleastOneArgumentWithDescription := false -}}
@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
 		ctx := cmd.Context()
 		{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 		{{- if .Request }}
-			{{ if .CanUseJson }}
+			{{ if $canUseJson }}
 			if cmd.Flags().Changed("json") {
 					diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 					if diags.HasError() {
@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
 				return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 			}{{- end}}
 			{{- if $hasPosArgs }}
-			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 			{{- end}}
 			{{- if $hasIdPrompt}}
 				if len(args) == 0 {
@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {
 
 			{{$method := .}}
 			{{- range $arg, $field := .RequiredPositionalArguments}}
-				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
+				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
 			{{- end -}}
-			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 			}
 			{{- end}}
 
@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $method := .Method -}}
 	{{- $arg := .Arg -}}
 	{{- $hasIdPrompt := .HasIdPrompt -}}
-	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
+	{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
+	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
 	{{- if $optionalIfJsonIsUsed  }}
 	if !cmd.Flags().Changed("json") {
 	{{- end }}
diff --git a/.gitattributes b/.gitattributes
index 0a8ddf3cb..ebe94ed8e 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -31,6 +31,7 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
+cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go
index ffe55db71..afc1fb22a 100644
--- a/bundle/deploy/terraform/convert_test.go
+++ b/bundle/deploy/terraform/convert_test.go
@@ -419,7 +419,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
@@ -474,7 +474,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
 			// and as such observed the `omitempty` tag.
 			// The new method leverages [dyn.Value] where any field that is not
 			// explicitly set is not part of the value.
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
index d46350bb7..98cf2dc22 100644
--- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
+++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
@@ -17,7 +17,7 @@ func TestConvertModelServingEndpoint(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml
index 8ff5c9253..d5a9bf69e 100644
--- a/bundle/internal/schema/annotations_openapi.yml
+++ b/bundle/internal/schema/annotations_openapi.yml
@@ -353,12 +353,12 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel:
 github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
   "ai_gateway":
     "description": |-
-      The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.
+      The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.
   "config":
     "description": |-
       The core config of the serving endpoint.
   "name":
-    "description": |
+    "description": |-
       The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.
       An endpoint name can consist of alphanumeric characters, dashes, and underscores.
   "rate_limits":
@@ -1974,6 +1974,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask:
       Parameters passed to the main method.
 
       Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
+  "run_as_repl":
+    "description": |-
+      Deprecated. A value of `false` is no longer supported.
 github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask:
   "parameters":
     "description": |-
@@ -2684,27 +2687,36 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScd
 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
   "ai21labs_api_key":
     "description": |-
-      The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      The Databricks secret key reference for an AI21 Labs API key. If you
+      prefer to paste your API key directly, see `ai21labs_api_key_plaintext`.
+      You must provide an API key using one of the following fields:
+      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
   "ai21labs_api_key_plaintext":
     "description": |-
-      An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      An AI21 Labs API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `ai21labs_api_key`. You
+      must provide an API key using one of the following fields:
+      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig:
   "guardrails":
     "description": |-
       Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
   "inference_table_config":
     "description": |-
-      Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
+      Configuration for payload logging using inference tables.
+      Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
   "rate_limits":
     "description": |-
       Configuration for rate limits which can be set to limit endpoint traffic.
   "usage_tracking_config":
     "description": |-
-      Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.
+      Configuration to enable usage tracking using system tables.
+      These tables allow you to monitor operational usage on endpoints and their associated costs.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters:
   "invalid_keywords":
     "description": |-
-      List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
+      List of invalid keywords.
+      AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
   "pii":
     "description": |-
       Configuration for guardrail PII filter.
@@ -2713,15 +2725,14 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame
       Indicates whether the safety filter is enabled.
   "valid_topics":
     "description": |-
-      The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
+      The list of allowed topics.
+      Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior:
   "behavior":
     "description": |-
-      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
+      Configuration for input guardrail filters.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior:
   "_":
-    "description": |-
-      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
     "enum":
       - |-
         NONE
@@ -2737,30 +2748,32 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails:
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig:
   "catalog_name":
     "description": |-
-      The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.
+      The name of the catalog in Unity Catalog. Required when enabling inference tables.
+      NOTE: On update, you have to disable inference table first in order to change the catalog name.
   "enabled":
     "description": |-
       Indicates whether the inference table is enabled.
   "schema_name":
     "description": |-
-      The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.
+      The name of the schema in Unity Catalog. Required when enabling inference tables.
+      NOTE: On update, you have to disable inference table first in order to change the schema name.
   "table_name_prefix":
     "description": |-
-      The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.
+      The prefix of the table in Unity Catalog.
+      NOTE: On update, you have to disable inference table first in order to change the prefix name.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit:
   "calls":
     "description": |-
       Used to specify how many calls are allowed for a key within the renewal_period.
   "key":
     "description": |-
-      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
+      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,
+      with 'endpoint' being the default if not specified.
   "renewal_period":
     "description": |-
       Renewal period field for a rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
   "_":
-    "description": |-
-      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -2768,8 +2781,6 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod:
   "_":
-    "description": |-
-      Renewal period field for a rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -2780,26 +2791,43 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingCo
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig:
   "aws_access_key_id":
     "description": |-
-      The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
+      The Databricks secret key reference for an AWS access key ID with
+      permissions to interact with Bedrock services. If you prefer to paste
+      your API key directly, see `aws_access_key_id_plaintext`. You must provide an API
+      key using one of the following fields: `aws_access_key_id` or
+      `aws_access_key_id_plaintext`.
   "aws_access_key_id_plaintext":
     "description": |-
-      An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
+      An AWS access key ID with permissions to interact with Bedrock services
+      provided as a plaintext string. If you prefer to reference your key using
+      Databricks Secrets, see `aws_access_key_id`. You must provide an API key
+      using one of the following fields: `aws_access_key_id` or
+      `aws_access_key_id_plaintext`.
   "aws_region":
     "description": |-
       The AWS region to use. Bedrock has to be enabled there.
   "aws_secret_access_key":
     "description": |-
-      The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
+      The Databricks secret key reference for an AWS secret access key paired
+      with the access key ID, with permissions to interact with Bedrock
+      services. If you prefer to paste your API key directly, see
+      `aws_secret_access_key_plaintext`. You must provide an API key using one
+      of the following fields: `aws_secret_access_key` or
+      `aws_secret_access_key_plaintext`.
   "aws_secret_access_key_plaintext":
     "description": |-
-      An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
+      An AWS secret access key paired with the access key ID, with permissions
+      to interact with Bedrock services provided as a plaintext string. If you
+      prefer to reference your key using Databricks Secrets, see
+      `aws_secret_access_key`. You must provide an API key using one of the
+      following fields: `aws_secret_access_key` or
+      `aws_secret_access_key_plaintext`.
   "bedrock_provider":
     "description": |-
-      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
+      The underlying provider in Amazon Bedrock. Supported values (case
+      insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider:
   "_":
-    "description": |-
-      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
     "enum":
       - |-
         anthropic
@@ -2812,10 +2840,16 @@ github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedro
 github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig:
   "anthropic_api_key":
     "description": |-
-      The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Databricks secret key reference for an Anthropic API key. If you
+      prefer to paste your API key directly, see `anthropic_api_key_plaintext`.
+      You must provide an API key using one of the following fields:
+      `anthropic_api_key` or `anthropic_api_key_plaintext`.
   "anthropic_api_key_plaintext":
     "description": |-
-      The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Anthropic API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `anthropic_api_key`. You
+      must provide an API key using one of the following fields:
+      `anthropic_api_key` or `anthropic_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
   "catalog_name":
     "description": |-
@@ -2831,42 +2865,58 @@ github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
       The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.
 github.com/databricks/databricks-sdk-go/service/serving.CohereConfig:
   "cohere_api_base":
-    "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n"
+    "description": |-
+      This is an optional field to provide a customized base URL for the Cohere
+      API. If left unspecified, the standard Cohere base URL is used.
   "cohere_api_key":
     "description": |-
-      The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
+      The Databricks secret key reference for a Cohere API key. If you prefer
+      to paste your API key directly, see `cohere_api_key_plaintext`. You must
+      provide an API key using one of the following fields: `cohere_api_key` or
+      `cohere_api_key_plaintext`.
   "cohere_api_key_plaintext":
     "description": |-
-      The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
+      The Cohere API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `cohere_api_key`. You
+      must provide an API key using one of the following fields:
+      `cohere_api_key` or `cohere_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig:
   "databricks_api_token":
-    "description": |
-      The Databricks secret key reference for a Databricks API token that corresponds to a user or service
-      principal with Can Query access to the model serving endpoint pointed to by this external model.
-      If you prefer to paste your API key directly, see `databricks_api_token_plaintext`.
-      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |-
+      The Databricks secret key reference for a Databricks API token that
+      corresponds to a user or service principal with Can Query access to the
+      model serving endpoint pointed to by this external model. If you prefer
+      to paste your API key directly, see `databricks_api_token_plaintext`. You
+      must provide an API key using one of the following fields:
+      `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_api_token_plaintext":
-    "description": |
-      The Databricks API token that corresponds to a user or service
-      principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.
-      If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.
-      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |-
+      The Databricks API token that corresponds to a user or service principal
+      with Can Query access to the model serving endpoint pointed to by this
+      external model provided as a plaintext string. If you prefer to reference
+      your key using Databricks Secrets, see `databricks_api_token`. You must
+      provide an API key using one of the following fields:
+      `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_workspace_url":
-    "description": |
-      The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.
+    "description": |-
+      The URL of the Databricks workspace containing the model serving endpoint
+      pointed to by this external model.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput:
   "auto_capture_config":
     "description": |-
       Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+      Note: this field is deprecated for creating new provisioned throughput endpoints,
+      or updating existing provisioned throughput endpoints that never have inference table configured;
+      in these cases please use AI Gateway to manage inference tables.
   "served_entities":
     "description": |-
-      A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.
+      The list of served entities under the serving endpoint config.
   "served_models":
     "description": |-
-      (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.
+      (Deprecated, use served_entities instead) The list of served models under the serving endpoint config.
   "traffic_config":
     "description": |-
-      The traffic config defining how invocations to the serving endpoint should be routed.
+      The traffic configuration associated with the serving endpoint config.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointTag:
   "key":
     "description": |-
@@ -2903,17 +2953,13 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModel:
     "description": |-
       PaLM Config. Only required if the provider is 'palm'.
   "provider":
-    "description": |
-      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
-      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
+    "description": |-
+      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.
   "task":
     "description": |-
       The task type of the external model.
 github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
   "_":
-    "description": |
-      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
-      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
     "enum":
       - |-
         ai21labs
@@ -2934,70 +2980,114 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
 github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
   "private_key":
     "description": |-
-      The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`
+      The Databricks secret key reference for a private key for the service
+      account which has access to the Google Cloud Vertex AI Service. See [Best
+      practices for managing service account keys]. If you prefer to paste your
+      API key directly, see `private_key_plaintext`. You must provide an API
+      key using one of the following fields: `private_key` or
+      `private_key_plaintext`
+
+      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
   "private_key_plaintext":
     "description": |-
-      The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.
+      The private key for the service account which has access to the Google
+      Cloud Vertex AI Service provided as a plaintext secret. See [Best
+      practices for managing service account keys]. If you prefer to reference
+      your key using Databricks Secrets, see `private_key`. You must provide an
+      API key using one of the following fields: `private_key` or
+      `private_key_plaintext`.
+
+      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
   "project_id":
     "description": |-
-      This is the Google Cloud project id that the service account is associated with.
+      This is the Google Cloud project id that the service account is
+      associated with.
   "region":
     "description": |-
-      This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.
+      This is the region for the Google Cloud Vertex AI Service. See [supported
+      regions] for more details. Some models are only available in specific
+      regions.
+
+      [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations
 github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
+  "_":
+    "description": |-
+      Configs needed to create an OpenAI model route.
   "microsoft_entra_client_id":
-    "description": |
-      This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.
+    "description": |-
+      This field is only required for Azure AD OpenAI and is the Microsoft
+      Entra Client ID.
   "microsoft_entra_client_secret":
-    "description": |
-      The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.
-      If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.
-      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
+    "description": |-
+      The Databricks secret key reference for a client secret used for
+      Microsoft Entra ID authentication. If you prefer to paste your client
+      secret directly, see `microsoft_entra_client_secret_plaintext`. You must
+      provide an API key using one of the following fields:
+      `microsoft_entra_client_secret` or
+      `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_client_secret_plaintext":
-    "description": |
-      The client secret used for Microsoft Entra ID authentication provided as a plaintext string.
-      If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.
-      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
+    "description": |-
+      The client secret used for Microsoft Entra ID authentication provided as
+      a plaintext string. If you prefer to reference your key using Databricks
+      Secrets, see `microsoft_entra_client_secret`. You must provide an API key
+      using one of the following fields: `microsoft_entra_client_secret` or
+      `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_tenant_id":
-    "description": |
-      This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.
+    "description": |-
+      This field is only required for Azure AD OpenAI and is the Microsoft
+      Entra Tenant ID.
   "openai_api_base":
-    "description": |
-      This is a field to provide a customized base URl for the OpenAI API.
-      For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service
-      provided by Azure.
-      For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.
+    "description": |-
+      This is a field to provide a customized base URL for the OpenAI API. For
+      Azure OpenAI, this field is required, and is the base URL for the Azure
+      OpenAI API service provided by Azure. For other OpenAI API types, this
+      field is optional, and if left unspecified, the standard OpenAI base URL
+      is used.
   "openai_api_key":
     "description": |-
-      The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The Databricks secret key reference for an OpenAI API key using the
+      OpenAI or Azure service. If you prefer to paste your API key directly,
+      see `openai_api_key_plaintext`. You must provide an API key using one of
+      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_key_plaintext":
     "description": |-
-      The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The OpenAI API key using the OpenAI or Azure service provided as a
+      plaintext string. If you prefer to reference your key using Databricks
+      Secrets, see `openai_api_key`. You must provide an API key using one of
+      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_type":
-    "description": |
-      This is an optional field to specify the type of OpenAI API to use.
-      For Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security
-      access validation protocol. For access token validation, use azure. For authentication using Azure Active
+    "description": |-
+      This is an optional field to specify the type of OpenAI API to use. For
+      Azure OpenAI, this field is required, and adjust this parameter to
+      represent the preferred security access validation protocol. For access
+      token validation, use azure. For authentication using Azure Active
       Directory (Azure AD) use, azuread.
   "openai_api_version":
-    "description": |
-      This is an optional field to specify the OpenAI API version.
-      For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to
-      utilize, specified by a date.
+    "description": |-
+      This is an optional field to specify the OpenAI API version. For Azure
+      OpenAI, this field is required, and is the version of the Azure OpenAI
+      service to utilize, specified by a date.
   "openai_deployment_name":
-    "description": |
-      This field is only required for Azure OpenAI and is the name of the deployment resource for the
-      Azure OpenAI service.
+    "description": |-
+      This field is only required for Azure OpenAI and is the name of the
+      deployment resource for the Azure OpenAI service.
   "openai_organization":
-    "description": |
-      This is an optional field to specify the organization in OpenAI or Azure OpenAI.
+    "description": |-
+      This is an optional field to specify the organization in OpenAI or Azure
+      OpenAI.
 github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
   "palm_api_key":
     "description": |-
-      The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
+      The Databricks secret key reference for a PaLM API key. If you prefer to
+      paste your API key directly, see `palm_api_key_plaintext`. You must
+      provide an API key using one of the following fields: `palm_api_key` or
+      `palm_api_key_plaintext`.
   "palm_api_key_plaintext":
     "description": |-
-      The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
+      The PaLM API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `palm_api_key`. You must
+      provide an API key using one of the following fields: `palm_api_key` or
+      `palm_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
   "calls":
     "description": |-
@@ -3010,8 +3100,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
       Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
   "_":
-    "description": |-
-      Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -3019,8 +3107,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod:
   "_":
-    "description": |-
-      Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -3033,21 +3119,15 @@ github.com/databricks/databricks-sdk-go/service/serving.Route:
       The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.
 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
   "entity_name":
-    "description": |
-      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),
-      or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of
-      __catalog_name__.__schema_name__.__model_name__.
-  "entity_version":
     "description": |-
-      The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.
+      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.
+  "entity_version": {}
   "environment_vars":
-    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
+    "description": |-
+      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
   "external_model":
-    "description": |
-      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)
-      can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,
-      it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.
-      The task type of all external models within an endpoint must be the same.
+    "description": |-
+      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.
   "instance_profile_arn":
     "description": |-
       ARN of the instance profile that the served entity uses to access AWS resources.
@@ -3058,68 +3138,46 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
   "name":
-    "description": |
-      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.
-      If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other
-      entities, it defaults to <entity-name>-<entity-version>.
+    "description": |-
+      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
   "scale_to_zero_enabled":
     "description": |-
       Whether the compute resources for the served entity should scale down to zero.
   "workload_size":
-    "description": |
-      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
+    "description": |-
+      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
   "workload_type":
-    "description": |
-      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |-
+      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
   "environment_vars":
-    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
+    "description": |-
+      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
   "instance_profile_arn":
     "description": |-
-      ARN of the instance profile that the served model will use to access AWS resources.
+      ARN of the instance profile that the served entity uses to access AWS resources.
   "max_provisioned_throughput":
     "description": |-
       The maximum tokens per second that the endpoint can scale up to.
   "min_provisioned_throughput":
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
-  "model_name":
-    "description": |
-      The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,
-      in the form of __catalog_name__.__schema_name__.__model_name__.
-  "model_version":
-    "description": |-
-      The version of the model in Databricks Model Registry or Unity Catalog to be served.
+  "model_name": {}
+  "model_version": {}
   "name":
-    "description": |
-      The name of a served model. It must be unique across an endpoint. If not specified, this field will default to <model-name>-<model-version>.
-      A served model name can consist of alphanumeric characters, dashes, and underscores.
+    "description": |-
+      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
   "scale_to_zero_enabled":
     "description": |-
-      Whether the compute resources for the served model should scale down to zero.
+      Whether the compute resources for the served entity should scale down to zero.
   "workload_size":
-    "description": |
-      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
+    "description": |-
+      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
   "workload_type":
-    "description": |
-      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |-
+      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize:
   "_":
-    "description": |
-      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
     "enum":
       - |-
         Small
@@ -3129,17 +3187,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkload
         Large
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType:
   "_":
-    "description": |
-      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
     "enum":
       - |-
         CPU
+      - |-
+        GPU_MEDIUM
       - |-
         GPU_SMALL
+      - |-
+        GPU_LARGE
+      - |-
+        MULTIGPU_MEDIUM
+github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType:
+  "_":
+    "enum":
+      - |-
+        CPU
       - |-
         GPU_MEDIUM
+      - |-
+        GPU_SMALL
       - |-
         GPU_LARGE
       - |-
diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index 120a12543..323432fa3 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -197,3 +197,14 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
   "manual":
     "description": |-
       PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
+  "entity_version":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
+  "model_name":
+    "description": |-
+      PLACEHOLDER
+  "model_version":
+    "description": |-
+      PLACEHOLDER
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index 4a3b56814..17a621ba0 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -546,7 +546,7 @@
                     "type": "object",
                     "properties": {
                       "ai_gateway": {
-                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.",
+                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.",
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig"
                       },
                       "config": {
@@ -554,7 +554,7 @@
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput"
                       },
                       "name": {
-                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.\n",
+                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.",
                         "$ref": "#/$defs/string"
                       },
                       "permissions": {
@@ -575,7 +575,6 @@
                     },
                     "additionalProperties": false,
                     "required": [
-                      "config",
                       "name"
                     ]
                   },
@@ -4142,6 +4141,10 @@
                     "parameters": {
                       "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
                       "$ref": "#/$defs/slice/string"
+                    },
+                    "run_as_repl": {
+                      "description": "Deprecated. A value of `false` is no longer supported.",
+                      "$ref": "#/$defs/bool"
                     }
                   },
                   "additionalProperties": false
@@ -5502,11 +5505,11 @@
                   "type": "object",
                   "properties": {
                     "ai21labs_api_key": {
-                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "ai21labs_api_key_plaintext": {
-                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5528,7 +5531,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails"
                     },
                     "inference_table_config": {
-                      "description": "Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
+                      "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig"
                     },
                     "rate_limits": {
@@ -5536,7 +5539,7 @@
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit"
                     },
                     "usage_tracking_config": {
-                      "description": "Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.",
+                      "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig"
                     }
                   },
@@ -5554,7 +5557,7 @@
                   "type": "object",
                   "properties": {
                     "invalid_keywords": {
-                      "description": "List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
+                      "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
                       "$ref": "#/$defs/slice/string"
                     },
                     "pii": {
@@ -5566,7 +5569,7 @@
                       "$ref": "#/$defs/bool"
                     },
                     "valid_topics": {
-                      "description": "The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
+                      "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
                       "$ref": "#/$defs/slice/string"
                     }
                   },
@@ -5584,14 +5587,11 @@
                   "type": "object",
                   "properties": {
                     "behavior": {
-                      "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
+                      "description": "Configuration for input guardrail filters.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior"
                     }
                   },
-                  "additionalProperties": false,
-                  "required": [
-                    "behavior"
-                  ]
+                  "additionalProperties": false
                 },
                 {
                   "type": "string",
@@ -5603,7 +5603,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
                   "enum": [
                     "NONE",
                     "BLOCK"
@@ -5643,7 +5642,7 @@
                   "type": "object",
                   "properties": {
                     "catalog_name": {
-                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.",
+                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.",
                       "$ref": "#/$defs/string"
                     },
                     "enabled": {
@@ -5651,11 +5650,11 @@
                       "$ref": "#/$defs/bool"
                     },
                     "schema_name": {
-                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.",
+                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.",
                       "$ref": "#/$defs/string"
                     },
                     "table_name_prefix": {
-                      "description": "The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.",
+                      "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5674,10 +5673,10 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int"
+                      "$ref": "#/$defs/int64"
                     },
                     "key": {
-                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
+                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey"
                     },
                     "renewal_period": {
@@ -5701,7 +5700,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -5717,7 +5715,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -5752,11 +5749,11 @@
                   "type": "object",
                   "properties": {
                     "aws_access_key_id": {
-                      "description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_access_key_id_plaintext": {
-                      "description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
+                      "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_region": {
@@ -5764,15 +5761,15 @@
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key": {
-                      "description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key_plaintext": {
-                      "description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
+                      "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "bedrock_provider": {
-                      "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
+                      "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider"
                     }
                   },
@@ -5792,7 +5789,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                   "enum": [
                     "anthropic",
                     "cohere",
@@ -5812,11 +5808,11 @@
                   "type": "object",
                   "properties": {
                     "anthropic_api_key": {
-                      "description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "anthropic_api_key_plaintext": {
-                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5864,15 +5860,15 @@
                   "type": "object",
                   "properties": {
                     "cohere_api_base": {
-                      "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n",
+                      "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key": {
-                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key_plaintext": {
-                      "description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
+                      "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5890,15 +5886,15 @@
                   "type": "object",
                   "properties": {
                     "databricks_api_token": {
-                      "description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
+                      "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_api_token_plaintext": {
-                      "description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
+                      "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_workspace_url": {
-                      "description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n",
+                      "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5919,19 +5915,19 @@
                   "type": "object",
                   "properties": {
                     "auto_capture_config": {
-                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
+                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput"
                     },
                     "served_entities": {
-                      "description": "A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.",
+                      "description": "The list of served entities under the serving endpoint config.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput"
                     },
                     "served_models": {
-                      "description": "(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.",
+                      "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput"
                     },
                     "traffic_config": {
-                      "description": "The traffic config defining how invocations to the serving endpoint should be routed.",
+                      "description": "The traffic configuration associated with the serving endpoint config.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig"
                     }
                   },
@@ -6010,7 +6006,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig"
                     },
                     "provider": {
-                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
+                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider"
                     },
                     "task": {
@@ -6035,7 +6031,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
                   "enum": [
                     "ai21labs",
                     "anthropic",
@@ -6059,23 +6054,27 @@
                   "type": "object",
                   "properties": {
                     "private_key": {
-                      "description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`",
+                      "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
                       "$ref": "#/$defs/string"
                     },
                     "private_key_plaintext": {
-                      "description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.",
+                      "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
                       "$ref": "#/$defs/string"
                     },
                     "project_id": {
-                      "description": "This is the Google Cloud project id that the service account is associated with.",
+                      "description": "This is the Google Cloud project id that the service account is\nassociated with.",
                       "$ref": "#/$defs/string"
                     },
                     "region": {
-                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.",
+                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations",
                       "$ref": "#/$defs/string"
                     }
                   },
-                  "additionalProperties": false
+                  "additionalProperties": false,
+                  "required": [
+                    "project_id",
+                    "region"
+                  ]
                 },
                 {
                   "type": "string",
@@ -6087,49 +6086,50 @@
               "oneOf": [
                 {
                   "type": "object",
+                  "description": "Configs needed to create an OpenAI model route.",
                   "properties": {
                     "microsoft_entra_client_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret": {
-                      "description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
+                      "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret_plaintext": {
-                      "description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
+                      "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_tenant_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_base": {
-                      "description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n",
+                      "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key": {
-                      "description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key_plaintext": {
-                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_type": {
-                      "description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n",
+                      "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_version": {
-                      "description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n",
+                      "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_deployment_name": {
-                      "description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n",
+                      "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_organization": {
-                      "description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n",
+                      "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6147,11 +6147,11 @@
                   "type": "object",
                   "properties": {
                     "palm_api_key": {
-                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "palm_api_key_plaintext": {
-                      "description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
+                      "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6170,7 +6170,7 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int"
+                      "$ref": "#/$defs/int64"
                     },
                     "key": {
                       "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
@@ -6197,7 +6197,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -6213,7 +6212,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -6256,19 +6254,18 @@
                   "type": "object",
                   "properties": {
                     "entity_name": {
-                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),\nor a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of\n__catalog_name__.__schema_name__.__model_name__.\n",
+                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.",
                       "$ref": "#/$defs/string"
                     },
                     "entity_version": {
-                      "description": "The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.",
                       "$ref": "#/$defs/string"
                     },
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "external_model": {
-                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
+                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel"
                     },
                     "instance_profile_arn": {
@@ -6284,7 +6281,7 @@
                       "$ref": "#/$defs/int"
                     },
                     "name": {
-                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.\nIf not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other\nentities, it defaults to \u003centity-name\u003e-\u003centity-version\u003e.\n",
+                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
@@ -6292,12 +6289,12 @@
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.\n",
+                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
                       "$ref": "#/$defs/string"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
-                      "$ref": "#/$defs/string"
+                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
+                      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType"
                     }
                   },
                   "additionalProperties": false
@@ -6314,11 +6311,11 @@
                   "type": "object",
                   "properties": {
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "instance_profile_arn": {
-                      "description": "ARN of the instance profile that the served model will use to access AWS resources.",
+                      "description": "ARN of the instance profile that the served entity uses to access AWS resources.",
                       "$ref": "#/$defs/string"
                     },
                     "max_provisioned_throughput": {
@@ -6330,27 +6327,25 @@
                       "$ref": "#/$defs/int"
                     },
                     "model_name": {
-                      "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n",
                       "$ref": "#/$defs/string"
                     },
                     "model_version": {
-                      "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served.",
                       "$ref": "#/$defs/string"
                     },
                     "name": {
-                      "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to \u003cmodel-name\u003e-\u003cmodel-version\u003e.\nA served model name can consist of alphanumeric characters, dashes, and underscores.\n",
+                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
-                      "description": "Whether the compute resources for the served model should scale down to zero.",
+                      "description": "Whether the compute resources for the served entity should scale down to zero.",
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
+                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
+                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType"
                     }
                   },
@@ -6371,7 +6366,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
                   "enum": [
                     "Small",
                     "Medium",
@@ -6388,11 +6382,28 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
                   "enum": [
                     "CPU",
-                    "GPU_SMALL",
                     "GPU_MEDIUM",
+                    "GPU_SMALL",
+                    "GPU_LARGE",
+                    "MULTIGPU_MEDIUM"
+                  ]
+                },
+                {
+                  "type": "string",
+                  "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+                }
+              ]
+            },
+            "serving.ServingModelWorkloadType": {
+              "oneOf": [
+                {
+                  "type": "string",
+                  "enum": [
+                    "CPU",
+                    "GPU_MEDIUM",
+                    "GPU_SMALL",
                     "GPU_LARGE",
                     "MULTIGPU_MEDIUM"
                   ]
diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go
index 1eec1018e..43e458bc6 100755
--- a/cmd/account/custom-app-integration/custom-app-integration.go
+++ b/cmd/account/custom-app-integration/custom-app-integration.go
@@ -307,6 +307,7 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: array: redirect_urls
+	// TODO: array: scopes
 	// TODO: complex arg: token_access_policy
 
 	cmd.Use = "update INTEGRATION_ID"
diff --git a/cmd/api/api.go b/cmd/api/api.go
index c3a3eb0b6..fad8a026f 100644
--- a/cmd/api/api.go
+++ b/cmd/api/api.go
@@ -62,7 +62,7 @@ func makeCommand(method string) *cobra.Command {
 
 			var response any
 			headers := map[string]string{"Content-Type": "application/json"}
-			err = api.Do(cmd.Context(), method, path, headers, request, &response)
+			err = api.Do(cmd.Context(), method, path, headers, nil, request, &response)
 			if err != nil {
 				return err
 			}
diff --git a/cmd/workspace/access-control/access-control.go b/cmd/workspace/access-control/access-control.go
new file mode 100755
index 000000000..7668265fb
--- /dev/null
+++ b/cmd/workspace/access-control/access-control.go
@@ -0,0 +1,109 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package access_control
+
+import (
+	"fmt"
+
+	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/spf13/cobra"
+)
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var cmdOverrides []func(*cobra.Command)
+
+func New() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:     "access-control",
+		Short:   `Rule based Access Control for Databricks Resources.`,
+		Long:    `Rule based Access Control for Databricks Resources.`,
+		GroupID: "iam",
+		Annotations: map[string]string{
+			"package": "iam",
+		},
+
+		// This service is being previewed; hide from help output.
+		Hidden: true,
+	}
+
+	// Add methods
+	cmd.AddCommand(newCheckPolicy())
+
+	// Apply optional overrides to this command.
+	for _, fn := range cmdOverrides {
+		fn(cmd)
+	}
+
+	return cmd
+}
+
+// start check-policy command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var checkPolicyOverrides []func(
+	*cobra.Command,
+	*iam.CheckPolicyRequest,
+)
+
+func newCheckPolicy() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var checkPolicyReq iam.CheckPolicyRequest
+	var checkPolicyJson flags.JsonFlag
+
+	// TODO: short flags
+	cmd.Flags().Var(&checkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	// TODO: complex arg: resource_info
+
+	cmd.Use = "check-policy"
+	cmd.Short = `Check access policy to a resource.`
+	cmd.Long = `Check access policy to a resource.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := checkPolicyJson.Unmarshal(&checkPolicyReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		} else {
+			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+
+		response, err := w.AccessControl.CheckPolicy(ctx, checkPolicyReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range checkPolicyOverrides {
+		fn(cmd, &checkPolicyReq)
+	}
+
+	return cmd
+}
+
+// end service AccessControl
diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go
index f07d0cf76..c447bd736 100755
--- a/cmd/workspace/cmd.go
+++ b/cmd/workspace/cmd.go
@@ -3,6 +3,7 @@
 package workspace
 
 import (
+	access_control "github.com/databricks/cli/cmd/workspace/access-control"
 	alerts "github.com/databricks/cli/cmd/workspace/alerts"
 	alerts_legacy "github.com/databricks/cli/cmd/workspace/alerts-legacy"
 	apps "github.com/databricks/cli/cmd/workspace/apps"
@@ -96,6 +97,7 @@ import (
 func All() []*cobra.Command {
 	var out []*cobra.Command
 
+	out = append(out, access_control.New())
 	out = append(out, alerts.New())
 	out = append(out, alerts_legacy.New())
 	out = append(out, apps.New())
diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go
index 504beac5e..4d6262cff 100755
--- a/cmd/workspace/providers/providers.go
+++ b/cmd/workspace/providers/providers.go
@@ -64,7 +64,7 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`)
-	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
+	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
 
 	cmd.Use = "create NAME AUTHENTICATION_TYPE"
 	cmd.Short = `Create an auth provider.`
@@ -430,7 +430,7 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`)
 	cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the provider.`)
 	cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`)
-	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
+	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
 
 	cmd.Use = "update NAME"
 	cmd.Short = `Update a provider.`
diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go
index 56abd2014..6d6ce42f1 100755
--- a/cmd/workspace/recipients/recipients.go
+++ b/cmd/workspace/recipients/recipients.go
@@ -91,7 +91,7 @@ func newCreate() *cobra.Command {
 	cmd.Long = `Create a share recipient.
   
   Creates a new recipient with the delta sharing authentication type in the
-  metastore. The caller must be a metastore admin or has the
+  metastore. The caller must be a metastore admin or have the
   **CREATE_RECIPIENT** privilege on the metastore.
 
   Arguments:
@@ -186,28 +186,16 @@ func newDelete() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		deleteReq.Name = args[0]
 
 		err = w.Recipients.Delete(ctx, deleteReq)
@@ -258,28 +246,16 @@ func newGet() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		getReq.Name = args[0]
 
 		response, err := w.Recipients.Get(ctx, getReq)
@@ -384,7 +360,7 @@ func newRotateToken() *cobra.Command {
   the provided token info. The caller must be the owner of the recipient.
 
   Arguments:
-    NAME: The name of the recipient.
+    NAME: The name of the Recipient.
     EXISTING_TOKEN_EXPIRE_IN_SECONDS: The expiration time of the bearer token in ISO 8601 format. This will set
       the expiration_time of existing token only to a smaller timestamp, it
       cannot extend the expiration_time. Use 0 to expire the existing token
@@ -479,28 +455,16 @@ func newSharePermissions() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "The name of the Recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have the name of the recipient")
-		}
 		sharePermissionsReq.Name = args[0]
 
 		response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq)
@@ -560,6 +524,11 @@ func newUpdate() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -577,30 +546,13 @@ func newUpdate() *cobra.Command {
 				}
 			}
 		}
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		updateReq.Name = args[0]
 
-		err = w.Recipients.Update(ctx, updateReq)
+		response, err := w.Recipients.Update(ctx, updateReq)
 		if err != nil {
 			return err
 		}
-		return nil
+		return cmdio.Render(ctx, response)
 	}
 
 	// Disable completions since they are not applicable.
diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go
index cc99177c7..034133623 100755
--- a/cmd/workspace/serving-endpoints/serving-endpoints.go
+++ b/cmd/workspace/serving-endpoints/serving-endpoints.go
@@ -49,6 +49,7 @@ func New() *cobra.Command {
 	cmd.AddCommand(newGetOpenApi())
 	cmd.AddCommand(newGetPermissionLevels())
 	cmd.AddCommand(newGetPermissions())
+	cmd.AddCommand(newHttpRequest())
 	cmd.AddCommand(newList())
 	cmd.AddCommand(newLogs())
 	cmd.AddCommand(newPatch())
@@ -153,16 +154,34 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: complex arg: ai_gateway
+	// TODO: complex arg: config
 	// TODO: array: rate_limits
 	cmd.Flags().BoolVar(&createReq.RouteOptimized, "route-optimized", createReq.RouteOptimized, `Enable route optimization for the serving endpoint.`)
 	// TODO: array: tags
 
-	cmd.Use = "create"
+	cmd.Use = "create NAME"
 	cmd.Short = `Create a new serving endpoint.`
-	cmd.Long = `Create a new serving endpoint.`
+	cmd.Long = `Create a new serving endpoint.
+
+  Arguments:
+    NAME: The name of the serving endpoint. This field is required and must be
+      unique across a Databricks workspace. An endpoint name can consist of
+      alphanumeric characters, dashes, and underscores.`
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		if cmd.Flags().Changed("json") {
+			err := root.ExactArgs(0)(cmd, args)
+			if err != nil {
+				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input")
+			}
+			return nil
+		}
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -179,8 +198,9 @@ func newCreate() *cobra.Command {
 					return err
 				}
 			}
-		} else {
-			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+		if !cmd.Flags().Changed("json") {
+			createReq.Name = args[0]
 		}
 
 		wait, err := w.ServingEndpoints.Create(ctx, createReq)
@@ -233,10 +253,7 @@ func newDelete() *cobra.Command {
 
 	cmd.Use = "delete NAME"
 	cmd.Short = `Delete a serving endpoint.`
-	cmd.Long = `Delete a serving endpoint.
-
-  Arguments:
-    NAME: The name of the serving endpoint. This field is required.`
+	cmd.Long = `Delete a serving endpoint.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -432,11 +449,12 @@ func newGetOpenApi() *cobra.Command {
 
 		getOpenApiReq.Name = args[0]
 
-		err = w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
+		response, err := w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
 		if err != nil {
 			return err
 		}
-		return nil
+		defer response.Contents.Close()
+		return cmdio.Render(ctx, response.Contents)
 	}
 
 	// Disable completions since they are not applicable.
@@ -568,6 +586,77 @@ func newGetPermissions() *cobra.Command {
 	return cmd
 }
 
+// start http-request command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var httpRequestOverrides []func(
+	*cobra.Command,
+	*serving.ExternalFunctionRequest,
+)
+
+func newHttpRequest() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var httpRequestReq serving.ExternalFunctionRequest
+
+	// TODO: short flags
+
+	cmd.Flags().StringVar(&httpRequestReq.Headers, "headers", httpRequestReq.Headers, `Additional headers for the request.`)
+	cmd.Flags().StringVar(&httpRequestReq.Json, "json", httpRequestReq.Json, `The JSON payload to send in the request body.`)
+	cmd.Flags().StringVar(&httpRequestReq.Params, "params", httpRequestReq.Params, `Query parameters for the request.`)
+
+	cmd.Use = "http-request CONNECTION_NAME METHOD PATH"
+	cmd.Short = `Make external services call using the credentials stored in UC Connection.`
+	cmd.Long = `Make external services call using the credentials stored in UC Connection.
+
+  Arguments:
+    CONNECTION_NAME: The connection name to use. This is required to identify the external
+      connection.
+    METHOD: The HTTP method to use (e.g., 'GET', 'POST').
+    PATH: The relative path for the API endpoint. This is required.`
+
+	// This command is being previewed; hide from help output.
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(3)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		httpRequestReq.ConnectionName = args[0]
+		_, err = fmt.Sscan(args[1], &httpRequestReq.Method)
+		if err != nil {
+			return fmt.Errorf("invalid METHOD: %s", args[1])
+		}
+		httpRequestReq.Path = args[2]
+
+		response, err := w.ServingEndpoints.HttpRequest(ctx, httpRequestReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range httpRequestOverrides {
+		fn(cmd, &httpRequestReq)
+	}
+
+	return cmd
+}
+
 // start list command
 
 // Slice with functions to override default command behavior.
@@ -849,7 +938,7 @@ func newPutAiGateway() *cobra.Command {
 	cmd.Long = `Update AI Gateway of a serving endpoint.
   
   Used to update the AI Gateway of a serving endpoint. NOTE: Only external model
-  endpoints are currently supported.
+  and provisioned throughput endpoints are currently supported.
 
   Arguments:
     NAME: The name of the serving endpoint whose AI Gateway is being updated. This
diff --git a/go.mod b/go.mod
index 0ef800d7b..4a3bf1620 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,7 @@ toolchain go1.23.4
 require (
 	github.com/Masterminds/semver/v3 v3.3.1 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.56.0 // Apache 2.0
 	github.com/fatih/color v1.18.0 // MIT
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
 	github.com/hashicorp/go-version v1.7.0 // MPL 2.0
diff --git a/go.sum b/go.sum
index b1364cb26..b4e92c2c9 100644
--- a/go.sum
+++ b/go.sum
@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo=
 github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.55.0 h1:ReziD6spzTDltM0ml80LggKo27F3oUjgTinCFDJDnak=
-github.com/databricks/databricks-sdk-go v0.55.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
+github.com/databricks/databricks-sdk-go v0.56.0 h1:8BsqjrSLbm2ET+/SLCN8qD+v+HFvs891dzi1OaiyRfc=
+github.com/databricks/databricks-sdk-go v0.56.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
diff --git a/integration/cmd/sync/sync_test.go b/integration/cmd/sync/sync_test.go
index 632497054..88e6ed89a 100644
--- a/integration/cmd/sync/sync_test.go
+++ b/integration/cmd/sync/sync_test.go
@@ -158,7 +158,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath, expected
 
 	var res []byte
 	a.c.Eventually(func() bool {
-		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res)
+		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &res)
 		require.NoError(a.t, err)
 		actualContent := string(res)
 		return actualContent == expectedContent
diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go
index 88bbadd32..7102b6e29 100644
--- a/libs/filer/files_client.go
+++ b/libs/filer/files_client.go
@@ -148,7 +148,7 @@ func (w *FilesClient) Write(ctx context.Context, name string, reader io.Reader,
 	overwrite := slices.Contains(mode, OverwriteIfExists)
 	urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite)
 	headers := map[string]string{"Content-Type": "application/octet-stream"}
-	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil)
+	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, nil, reader, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -176,7 +176,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err
 	}
 
 	var reader io.ReadCloser
-	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &reader)
+	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &reader)
 
 	// Return early on success.
 	if err == nil {
diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go
index 8d5148edd..f7e1b3adb 100644
--- a/libs/filer/workspace_files_client.go
+++ b/libs/filer/workspace_files_client.go
@@ -106,7 +106,7 @@ func (info *wsfsFileInfo) MarshalJSON() ([]byte, error) {
 // as an interface to allow for mocking in tests.
 type apiClient interface {
 	Do(ctx context.Context, method, path string,
-		headers map[string]string, request, response any,
+		headers map[string]string, queryParams map[string]any, request, response any,
 		visitors ...func(*http.Request) error) error
 }
 
@@ -156,7 +156,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io
 		return err
 	}
 
-	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil)
+	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, nil, body, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -341,6 +341,7 @@ func (w *WorkspaceFilesClient) Stat(ctx context.Context, name string) (fs.FileIn
 		http.MethodGet,
 		"/api/2.0/workspace/get-status",
 		nil,
+		nil,
 		map[string]string{
 			"path":               absPath,
 			"return_export_info": "true",
diff --git a/libs/filer/workspace_files_extensions_client_test.go b/libs/filer/workspace_files_extensions_client_test.go
index 9ea837fa9..f9c65d6ee 100644
--- a/libs/filer/workspace_files_extensions_client_test.go
+++ b/libs/filer/workspace_files_extensions_client_test.go
@@ -17,7 +17,7 @@ type mockApiClient struct {
 }
 
 func (m *mockApiClient) Do(ctx context.Context, method, path string,
-	headers map[string]string, request, response any,
+	headers map[string]string, queryParams map[string]any, request, response any,
 	visitors ...func(*http.Request) error,
 ) error {
 	args := m.Called(ctx, method, path, headers, request, response, visitors)
diff --git a/libs/git/info.go b/libs/git/info.go
index 46e57be48..dc4af9b6d 100644
--- a/libs/git/info.go
+++ b/libs/git/info.go
@@ -66,6 +66,7 @@ func fetchRepositoryInfoAPI(ctx context.Context, path string, w *databricks.Work
 		http.MethodGet,
 		apiEndpoint,
 		nil,
+		nil,
 		map[string]string{
 			"path":            path,
 			"return_git_info": "true",

From 1f63aa0912705f6722873c8d4d1389c398f4d2df Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Thu, 23 Jan 2025 12:46:22 +0100
Subject: [PATCH 07/39] tests: Improve reporting in case of FS errors (#2216)

## Changes
If there are unreadable files in a directory, report an error but
continue with further diagnostics, because the explanation for the
failure is in the script output.
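
For reference, a compact sketch of the pattern this change adopts, assuming a
`testing.T` and `filepath.Walk`; the helper name is hypothetical and the real
code is the `ListDir` change in the diff below:

```go
package acceptance_test

import (
	"os"
	"path/filepath"
	"testing"
)

// listFilesTolerant reports walk errors via t.Errorf and keeps going, so the
// test still reaches the output comparison that explains the failure.
func listFilesTolerant(t *testing.T, src string) []string {
	var files []string
	err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Record the problem, but do not abort the walk or the test.
			t.Errorf("error when listing %s: path=%s: %s", src, path, err)
			return nil
		}
		if info.IsDir() {
			return nil
		}
		files = append(files, path)
		return nil
	})
	if err != nil {
		t.Errorf("failed to list %s: %s", src, err)
	}
	return files
}
```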

## Tests
Manually. I'm working on tests that create unreadable files; the
report is much better with this change.
---
 acceptance/acceptance_test.go | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 56db6ec20..96c1f651c 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -232,8 +232,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 	}
 
 	// Make sure there are not unaccounted for new files
-	files, err := ListDir(t, tmpDir)
-	require.NoError(t, err)
+	files := ListDir(t, tmpDir)
 	for _, relPath := range files {
 		if _, ok := inputs[relPath]; ok {
 			continue
@@ -450,11 +449,15 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
 	})
 }
 
-func ListDir(t *testing.T, src string) ([]string, error) {
+func ListDir(t *testing.T, src string) []string {
 	var files []string
 	err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
 		if err != nil {
-			return err
+			// Do not FailNow here.
+			// The output comparison happens after this call and includes output.txt, which contains
+			// errors printed by commands, including the explanation of why a given file could not be read.
+			t.Errorf("Error when listing %s: path=%s: %s", src, path, err)
+			return nil
 		}
 
 		if info.IsDir() {
@@ -469,5 +472,8 @@ func ListDir(t *testing.T, src string) ([]string, error) {
 		files = append(files, relPath)
 		return nil
 	})
-	return files, err
+	if err != nil {
+		t.Errorf("Failed to list %s: %s", src, err)
+	}
+	return files
 }

From ddd45e25ee24cfad9ec5834ed66b71bb278b168d Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Thu, 23 Jan 2025 13:48:47 +0100
Subject: [PATCH 08/39] Pass USE_SDK_V2_{RESOURCES,DATA_SOURCES} to terraform
 (#2207)

## Changes
- Propagate the env vars `USE_SDK_V2_RESOURCES` and `USE_SDK_V2_DATA_SOURCES`
to terraform (a minimal sketch of the pass-through follows this list).
- These are troubleshooting helpers for resources migrated to the new plugin
framework, recommended here:
https://registry.terraform.io/providers/databricks/databricks/latest/docs/guides/troubleshooting#plugin-framework-migration-problems
- This currently unblocks deploying quality monitors, see
https://github.com/databricks/terraform-provider-databricks/issues/4229#issuecomment-2520344690
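
As a rough illustration of the pass-through (a minimal sketch, not the
CLI's actual env handling; everything except the two variable names is
hypothetical):

```go
package main

import (
	"fmt"
	"os"
)

// Allowlist of variables copied from the parent process into the environment
// handed to the terraform CLI; only the two new entries are shown here.
var envCopy = []string{
	"USE_SDK_V2_RESOURCES",
	"USE_SDK_V2_DATA_SOURCES",
}

// terraformEnv returns "KEY=VALUE" pairs for the allowlisted variables that
// are set in the current environment, in the format expected by exec.Cmd.Env.
func terraformEnv() []string {
	var env []string
	for _, key := range envCopy {
		if value, ok := os.LookupEnv(key); ok {
			env = append(env, key+"="+value)
		}
	}
	return env
}

func main() {
	fmt.Println(terraformEnv())
}
```

With `USE_SDK_V2_RESOURCES="databricks_quality_monitor"` exported, the
variable ends up in Terraform's environment and the provider falls back to
the SDKv2 implementation for that resource.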

## Tests
Manually tested that I can deploy a quality monitor after this change
with `USE_SDK_V2_RESOURCES="databricks_quality_monitor"` set.

### Main branch:
```
~/work/databricks_quality_monitor_repro % USE_SDK_V2_RESOURCES="databricks_quality_monitor" ../cli/cli-main bundle deploy
Uploading bundle files to /Workspace/Users/denis.bilenko@databricks.com/.bundle/quality_monitor_bundle/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Error: terraform apply: exit status 1

Error: Provider produced inconsistent result after apply

When applying changes to databricks_quality_monitor.monitor_trips, provider
"provider[\"registry.terraform.io/databricks/databricks\"]" produced an
unexpected new value: .data_classification_config: block count changed from 0
to 1.

This is a bug in the provider, which should be reported in the provider's own
issue tracker.
```

### This branch:
```
~/work/databricks_quality_monitor_repro % USE_SDK_V2_RESOURCES="databricks_quality_monitor" ../cli/cli bundle deploy
Uploading bundle files to /Workspace/Users/denis.bilenko@databricks.com/.bundle/quality_monitor_bundle/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
```

### Config:
```
~/work/databricks_quality_monitor_repro % cat databricks.yml
bundle:
  name: quality_monitor_bundle

resources:
  quality_monitors:
    monitor_trips:
      table_name: main.denis-bilenko-cuj-pe34.trips_sanitized_1
      output_schema_name: main.denis-bilenko-cuj-pe34
      assets_dir: /Workspace/Users/${workspace.current_user.userName}/quality_monitor_issue
      snapshot: {}
```
---
 bundle/deploy/terraform/init.go | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go
index 6a014a7c1..5957611a4 100644
--- a/bundle/deploy/terraform/init.go
+++ b/bundle/deploy/terraform/init.go
@@ -108,6 +108,14 @@ var envCopy = []string{
 	// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
 	// See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration
 	"TF_CLI_CONFIG_FILE",
+
+	// Include $USE_SDK_V2_RESOURCES and $USE_SDK_V2_DATA_SOURCES; these are used to switch back from the plugin framework to SDKv2.
+	// This mitigates issues with resources migrated to the plugin framework, as recommended here:
+	// https://registry.terraform.io/providers/databricks/databricks/latest/docs/guides/troubleshooting#plugin-framework-migration-problems
+	// It is currently a workaround for deploying quality_monitors; see
+	// https://github.com/databricks/terraform-provider-databricks/issues/4229#issuecomment-2520344690
+	"USE_SDK_V2_RESOURCES",
+	"USE_SDK_V2_DATA_SOURCES",
 }
 
 // This function inherits some environment variables for Terraform CLI.

From 6153423c56ff56583087e0fa1c92a02a2eb3dca2 Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Thu, 23 Jan 2025 14:21:59 +0100
Subject: [PATCH 09/39] Revert "Upgrade Go SDK to 0.56.0 (#2214)" (#2217)

This reverts commit 798189eb96bc1184119dc039a2728f87b4ce6212.
---
 .codegen/_openapi_sha                         |   2 +-
 .codegen/service.go.tmpl                      |  20 +-
 .gitattributes                                |   1 -
 bundle/deploy/terraform/convert_test.go       |   4 +-
 .../convert_model_serving_endpoint_test.go    |   2 +-
 .../internal/schema/annotations_openapi.yml   | 367 +++++++-----------
 .../schema/annotations_openapi_overrides.yml  |  11 -
 bundle/schema/jsonschema.json                 | 173 ++++-----
 .../custom-app-integration.go                 |   1 -
 cmd/api/api.go                                |   2 +-
 .../access-control/access-control.go          | 109 ------
 cmd/workspace/cmd.go                          |   2 -
 cmd/workspace/providers/providers.go          |   4 +-
 cmd/workspace/recipients/recipients.go        |  96 +++--
 .../serving-endpoints/serving-endpoints.go    | 111 +-----
 go.mod                                        |   2 +-
 go.sum                                        |   4 +-
 integration/cmd/sync/sync_test.go             |   2 +-
 libs/filer/files_client.go                    |   4 +-
 libs/filer/workspace_files_client.go          |   5 +-
 .../workspace_files_extensions_client_test.go |   2 +-
 libs/git/info.go                              |   1 -
 22 files changed, 338 insertions(+), 587 deletions(-)
 delete mode 100755 cmd/workspace/access-control/access-control.go

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 588cf9d63..dfe78790a 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-0be1b914249781b5e903b7676fd02255755bc851
\ No newline at end of file
+779817ed8d63031f5ea761fbd25ee84f38feec0d
\ No newline at end of file
diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl
index 2f4987b13..0c9fa089a 100644
--- a/.codegen/service.go.tmpl
+++ b/.codegen/service.go.tmpl
@@ -109,19 +109,16 @@ var {{.CamelName}}Overrides []func(
 	{{- end }}
 )
 
-{{- $excludeFromJson := list "http-request"}}
-
 func new{{.PascalName}}() *cobra.Command {
 	cmd := &cobra.Command{}
 
-	{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 	{{- if .Request}}
 
 	var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 	{{- if .RequestBodyField }}
 	{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 	{{- end }}
-	{{- if $canUseJson}}
+	{{- if .CanUseJson}}
 	var {{.CamelName}}Json flags.JsonFlag
 	{{- end}}
 	{{- end}}
@@ -138,7 +135,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $request = .RequestBodyField.Entity -}}
 	{{- end -}}
     {{if $request }}// TODO: short flags
-	{{- if $canUseJson}}
+	{{- if .CanUseJson}}
 	cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 	{{- end}}
     {{$method := .}}
@@ -180,7 +177,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $hasRequiredArgs :=  and (not $hasIdPrompt) $hasPosArgs -}}
 	{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields))  -}}
 	{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 	{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
 
 	{{- $atleastOneArgumentWithDescription := false -}}
@@ -242,7 +239,7 @@ func new{{.PascalName}}() *cobra.Command {
 		ctx := cmd.Context()
 		{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 		{{- if .Request }}
-			{{ if $canUseJson }}
+			{{ if .CanUseJson }}
 			if cmd.Flags().Changed("json") {
 					diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 					if diags.HasError() {
@@ -258,7 +255,7 @@ func new{{.PascalName}}() *cobra.Command {
 				return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 			}{{- end}}
 			{{- if $hasPosArgs }}
-			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 			{{- end}}
 			{{- if $hasIdPrompt}}
 				if len(args) == 0 {
@@ -282,9 +279,9 @@ func new{{.PascalName}}() *cobra.Command {
 
 			{{$method := .}}
 			{{- range $arg, $field := .RequiredPositionalArguments}}
-				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
+				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
 			{{- end -}}
-			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 			}
 			{{- end}}
 
@@ -395,8 +392,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $method := .Method -}}
 	{{- $arg := .Arg -}}
 	{{- $hasIdPrompt := .HasIdPrompt -}}
-	{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
-	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
+	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
 	{{- if $optionalIfJsonIsUsed  }}
 	if !cmd.Flags().Changed("json") {
 	{{- end }}
diff --git a/.gitattributes b/.gitattributes
index ebe94ed8e..0a8ddf3cb 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -31,7 +31,6 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
-cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go
index afc1fb22a..ffe55db71 100644
--- a/bundle/deploy/terraform/convert_test.go
+++ b/bundle/deploy/terraform/convert_test.go
@@ -419,7 +419,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: &serving.EndpointCoreConfigInput{
+			Config: serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
@@ -474,7 +474,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
 			// and as such observed the `omitempty` tag.
 			// The new method leverages [dyn.Value] where any field that is not
 			// explicitly set is not part of the value.
-			Config: &serving.EndpointCoreConfigInput{
+			Config: serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
index 98cf2dc22..d46350bb7 100644
--- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
+++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
@@ -17,7 +17,7 @@ func TestConvertModelServingEndpoint(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: &serving.EndpointCoreConfigInput{
+			Config: serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml
index d5a9bf69e..8ff5c9253 100644
--- a/bundle/internal/schema/annotations_openapi.yml
+++ b/bundle/internal/schema/annotations_openapi.yml
@@ -353,12 +353,12 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel:
 github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
   "ai_gateway":
     "description": |-
-      The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.
+      The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.
   "config":
     "description": |-
       The core config of the serving endpoint.
   "name":
-    "description": |-
+    "description": |
       The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.
       An endpoint name can consist of alphanumeric characters, dashes, and underscores.
   "rate_limits":
@@ -1974,9 +1974,6 @@ github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask:
       Parameters passed to the main method.
 
       Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
-  "run_as_repl":
-    "description": |-
-      Deprecated. A value of `false` is no longer supported.
 github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask:
   "parameters":
     "description": |-
@@ -2687,36 +2684,27 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScd
 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
   "ai21labs_api_key":
     "description": |-
-      The Databricks secret key reference for an AI21 Labs API key. If you
-      prefer to paste your API key directly, see `ai21labs_api_key_plaintext`.
-      You must provide an API key using one of the following fields:
-      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
   "ai21labs_api_key_plaintext":
     "description": |-
-      An AI21 Labs API key provided as a plaintext string. If you prefer to
-      reference your key using Databricks Secrets, see `ai21labs_api_key`. You
-      must provide an API key using one of the following fields:
-      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig:
   "guardrails":
     "description": |-
       Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
   "inference_table_config":
     "description": |-
-      Configuration for payload logging using inference tables.
-      Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
+      Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
   "rate_limits":
     "description": |-
       Configuration for rate limits which can be set to limit endpoint traffic.
   "usage_tracking_config":
     "description": |-
-      Configuration to enable usage tracking using system tables.
-      These tables allow you to monitor operational usage on endpoints and their associated costs.
+      Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters:
   "invalid_keywords":
     "description": |-
-      List of invalid keywords.
-      AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
+      List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
   "pii":
     "description": |-
       Configuration for guardrail PII filter.
@@ -2725,14 +2713,15 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame
       Indicates whether the safety filter is enabled.
   "valid_topics":
     "description": |-
-      The list of allowed topics.
-      Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
+      The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior:
   "behavior":
     "description": |-
-      Configuration for input guardrail filters.
+      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior:
   "_":
+    "description": |-
+      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
     "enum":
       - |-
         NONE
@@ -2748,32 +2737,30 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails:
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig:
   "catalog_name":
     "description": |-
-      The name of the catalog in Unity Catalog. Required when enabling inference tables.
-      NOTE: On update, you have to disable inference table first in order to change the catalog name.
+      The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.
   "enabled":
     "description": |-
       Indicates whether the inference table is enabled.
   "schema_name":
     "description": |-
-      The name of the schema in Unity Catalog. Required when enabling inference tables.
-      NOTE: On update, you have to disable inference table first in order to change the schema name.
+      The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.
   "table_name_prefix":
     "description": |-
-      The prefix of the table in Unity Catalog.
-      NOTE: On update, you have to disable inference table first in order to change the prefix name.
+      The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit:
   "calls":
     "description": |-
       Used to specify how many calls are allowed for a key within the renewal_period.
   "key":
     "description": |-
-      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,
-      with 'endpoint' being the default if not specified.
+      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
   "renewal_period":
     "description": |-
       Renewal period field for a rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
   "_":
+    "description": |-
+      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -2781,6 +2768,8 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod:
   "_":
+    "description": |-
+      Renewal period field for a rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -2791,43 +2780,26 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingCo
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig:
   "aws_access_key_id":
     "description": |-
-      The Databricks secret key reference for an AWS access key ID with
-      permissions to interact with Bedrock services. If you prefer to paste
-      your API key directly, see `aws_access_key_id_plaintext`. You must provide an API
-      key using one of the following fields: `aws_access_key_id` or
-      `aws_access_key_id_plaintext`.
+      The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
   "aws_access_key_id_plaintext":
     "description": |-
-      An AWS access key ID with permissions to interact with Bedrock services
-      provided as a plaintext string. If you prefer to reference your key using
-      Databricks Secrets, see `aws_access_key_id`. You must provide an API key
-      using one of the following fields: `aws_access_key_id` or
-      `aws_access_key_id_plaintext`.
+      An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
   "aws_region":
     "description": |-
       The AWS region to use. Bedrock has to be enabled there.
   "aws_secret_access_key":
     "description": |-
-      The Databricks secret key reference for an AWS secret access key paired
-      with the access key ID, with permissions to interact with Bedrock
-      services. If you prefer to paste your API key directly, see
-      `aws_secret_access_key_plaintext`. You must provide an API key using one
-      of the following fields: `aws_secret_access_key` or
-      `aws_secret_access_key_plaintext`.
+      The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
   "aws_secret_access_key_plaintext":
     "description": |-
-      An AWS secret access key paired with the access key ID, with permissions
-      to interact with Bedrock services provided as a plaintext string. If you
-      prefer to reference your key using Databricks Secrets, see
-      `aws_secret_access_key`. You must provide an API key using one of the
-      following fields: `aws_secret_access_key` or
-      `aws_secret_access_key_plaintext`.
+      An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
   "bedrock_provider":
     "description": |-
-      The underlying provider in Amazon Bedrock. Supported values (case
-      insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
+      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider:
   "_":
+    "description": |-
+      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
     "enum":
       - |-
         anthropic
@@ -2840,16 +2812,10 @@ github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedro
 github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig:
   "anthropic_api_key":
     "description": |-
-      The Databricks secret key reference for an Anthropic API key. If you
-      prefer to paste your API key directly, see `anthropic_api_key_plaintext`.
-      You must provide an API key using one of the following fields:
-      `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
   "anthropic_api_key_plaintext":
     "description": |-
-      The Anthropic API key provided as a plaintext string. If you prefer to
-      reference your key using Databricks Secrets, see `anthropic_api_key`. You
-      must provide an API key using one of the following fields:
-      `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
   "catalog_name":
     "description": |-
@@ -2865,58 +2831,42 @@ github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
       The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.
 github.com/databricks/databricks-sdk-go/service/serving.CohereConfig:
   "cohere_api_base":
-    "description": |-
-      This is an optional field to provide a customized base URL for the Cohere
-      API. If left unspecified, the standard Cohere base URL is used.
+    "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n"
   "cohere_api_key":
     "description": |-
-      The Databricks secret key reference for a Cohere API key. If you prefer
-      to paste your API key directly, see `cohere_api_key_plaintext`. You must
-      provide an API key using one of the following fields: `cohere_api_key` or
-      `cohere_api_key_plaintext`.
+      The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
   "cohere_api_key_plaintext":
     "description": |-
-      The Cohere API key provided as a plaintext string. If you prefer to
-      reference your key using Databricks Secrets, see `cohere_api_key`. You
-      must provide an API key using one of the following fields:
-      `cohere_api_key` or `cohere_api_key_plaintext`.
+      The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig:
   "databricks_api_token":
-    "description": |-
-      The Databricks secret key reference for a Databricks API token that
-      corresponds to a user or service principal with Can Query access to the
-      model serving endpoint pointed to by this external model. If you prefer
-      to paste your API key directly, see `databricks_api_token_plaintext`. You
-      must provide an API key using one of the following fields:
-      `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |
+      The Databricks secret key reference for a Databricks API token that corresponds to a user or service
+      principal with Can Query access to the model serving endpoint pointed to by this external model.
+      If you prefer to paste your API key directly, see `databricks_api_token_plaintext`.
+      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_api_token_plaintext":
-    "description": |-
-      The Databricks API token that corresponds to a user or service principal
-      with Can Query access to the model serving endpoint pointed to by this
-      external model provided as a plaintext string. If you prefer to reference
-      your key using Databricks Secrets, see `databricks_api_token`. You must
-      provide an API key using one of the following fields:
-      `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |
+      The Databricks API token that corresponds to a user or service
+      principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.
+      If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.
+      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_workspace_url":
-    "description": |-
-      The URL of the Databricks workspace containing the model serving endpoint
-      pointed to by this external model.
+    "description": |
+      The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput:
   "auto_capture_config":
     "description": |-
       Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
-      Note: this field is deprecated for creating new provisioned throughput endpoints,
-      or updating existing provisioned throughput endpoints that never have inference table configured;
-      in these cases please use AI Gateway to manage inference tables.
   "served_entities":
     "description": |-
-      The list of served entities under the serving endpoint config.
+      A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.
   "served_models":
     "description": |-
-      (Deprecated, use served_entities instead) The list of served models under the serving endpoint config.
+      (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.
   "traffic_config":
     "description": |-
-      The traffic configuration associated with the serving endpoint config.
+      The traffic config defining how invocations to the serving endpoint should be routed.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointTag:
   "key":
     "description": |-
@@ -2953,13 +2903,17 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModel:
     "description": |-
       PaLM Config. Only required if the provider is 'palm'.
   "provider":
-    "description": |-
-      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.
+    "description": |
+      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
+      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
   "task":
     "description": |-
       The task type of the external model.
 github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
   "_":
+    "description": |
+      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
+      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
     "enum":
       - |-
         ai21labs
@@ -2980,114 +2934,70 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
 github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
   "private_key":
     "description": |-
-      The Databricks secret key reference for a private key for the service
-      account which has access to the Google Cloud Vertex AI Service. See [Best
-      practices for managing service account keys]. If you prefer to paste your
-      API key directly, see `private_key_plaintext`. You must provide an API
-      key using one of the following fields: `private_key` or
-      `private_key_plaintext`
-
-      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
+      The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`
   "private_key_plaintext":
     "description": |-
-      The private key for the service account which has access to the Google
-      Cloud Vertex AI Service provided as a plaintext secret. See [Best
-      practices for managing service account keys]. If you prefer to reference
-      your key using Databricks Secrets, see `private_key`. You must provide an
-      API key using one of the following fields: `private_key` or
-      `private_key_plaintext`.
-
-      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
+      The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.
   "project_id":
     "description": |-
-      This is the Google Cloud project id that the service account is
-      associated with.
+      This is the Google Cloud project id that the service account is associated with.
   "region":
     "description": |-
-      This is the region for the Google Cloud Vertex AI Service. See [supported
-      regions] for more details. Some models are only available in specific
-      regions.
-
-      [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations
+      This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.
 github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
-  "_":
-    "description": |-
-      Configs needed to create an OpenAI model route.
   "microsoft_entra_client_id":
-    "description": |-
-      This field is only required for Azure AD OpenAI and is the Microsoft
-      Entra Client ID.
+    "description": |
+      This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.
   "microsoft_entra_client_secret":
-    "description": |-
-      The Databricks secret key reference for a client secret used for
-      Microsoft Entra ID authentication. If you prefer to paste your client
-      secret directly, see `microsoft_entra_client_secret_plaintext`. You must
-      provide an API key using one of the following fields:
-      `microsoft_entra_client_secret` or
-      `microsoft_entra_client_secret_plaintext`.
+    "description": |
+      The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.
+      If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.
+      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_client_secret_plaintext":
-    "description": |-
-      The client secret used for Microsoft Entra ID authentication provided as
-      a plaintext string. If you prefer to reference your key using Databricks
-      Secrets, see `microsoft_entra_client_secret`. You must provide an API key
-      using one of the following fields: `microsoft_entra_client_secret` or
-      `microsoft_entra_client_secret_plaintext`.
+    "description": |
+      The client secret used for Microsoft Entra ID authentication provided as a plaintext string.
+      If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.
+      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_tenant_id":
-    "description": |-
-      This field is only required for Azure AD OpenAI and is the Microsoft
-      Entra Tenant ID.
+    "description": |
+      This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.
   "openai_api_base":
-    "description": |-
-      This is a field to provide a customized base URl for the OpenAI API. For
-      Azure OpenAI, this field is required, and is the base URL for the Azure
-      OpenAI API service provided by Azure. For other OpenAI API types, this
-      field is optional, and if left unspecified, the standard OpenAI base URL
-      is used.
+    "description": |
+      This is a field to provide a customized base URl for the OpenAI API.
+      For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service
+      provided by Azure.
+      For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.
   "openai_api_key":
     "description": |-
-      The Databricks secret key reference for an OpenAI API key using the
-      OpenAI or Azure service. If you prefer to paste your API key directly,
-      see `openai_api_key_plaintext`. You must provide an API key using one of
-      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_key_plaintext":
     "description": |-
-      The OpenAI API key using the OpenAI or Azure service provided as a
-      plaintext string. If you prefer to reference your key using Databricks
-      Secrets, see `openai_api_key`. You must provide an API key using one of
-      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_type":
-    "description": |-
-      This is an optional field to specify the type of OpenAI API to use. For
-      Azure OpenAI, this field is required, and adjust this parameter to
-      represent the preferred security access validation protocol. For access
-      token validation, use azure. For authentication using Azure Active
+    "description": |
+      This is an optional field to specify the type of OpenAI API to use.
+      For Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security
+      access validation protocol. For access token validation, use azure. For authentication using Azure Active
       Directory (Azure AD) use, azuread.
   "openai_api_version":
-    "description": |-
-      This is an optional field to specify the OpenAI API version. For Azure
-      OpenAI, this field is required, and is the version of the Azure OpenAI
-      service to utilize, specified by a date.
+    "description": |
+      This is an optional field to specify the OpenAI API version.
+      For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to
+      utilize, specified by a date.
   "openai_deployment_name":
-    "description": |-
-      This field is only required for Azure OpenAI and is the name of the
-      deployment resource for the Azure OpenAI service.
+    "description": |
+      This field is only required for Azure OpenAI and is the name of the deployment resource for the
+      Azure OpenAI service.
   "openai_organization":
-    "description": |-
-      This is an optional field to specify the organization in OpenAI or Azure
-      OpenAI.
+    "description": |
+      This is an optional field to specify the organization in OpenAI or Azure OpenAI.
 github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
   "palm_api_key":
     "description": |-
-      The Databricks secret key reference for a PaLM API key. If you prefer to
-      paste your API key directly, see `palm_api_key_plaintext`. You must
-      provide an API key using one of the following fields: `palm_api_key` or
-      `palm_api_key_plaintext`.
+      The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
   "palm_api_key_plaintext":
     "description": |-
-      The PaLM API key provided as a plaintext string. If you prefer to
-      reference your key using Databricks Secrets, see `palm_api_key`. You must
-      provide an API key using one of the following fields: `palm_api_key` or
-      `palm_api_key_plaintext`.
+      The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
   "calls":
     "description": |-
@@ -3100,6 +3010,8 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
       Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
   "_":
+    "description": |-
+      Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -3107,6 +3019,8 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod:
   "_":
+    "description": |-
+      Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -3119,15 +3033,21 @@ github.com/databricks/databricks-sdk-go/service/serving.Route:
       The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.
 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
   "entity_name":
+    "description": |
+      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),
+      or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of
+      __catalog_name__.__schema_name__.__model_name__.
+  "entity_version":
     "description": |-
-      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.
-  "entity_version": {}
+      The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.
   "environment_vars":
-    "description": |-
-      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
+    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
   "external_model":
-    "description": |-
-      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.
+    "description": |
+      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)
+      can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,
+      it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.
+      The task type of all external models within an endpoint must be the same.
   "instance_profile_arn":
     "description": |-
       ARN of the instance profile that the served entity uses to access AWS resources.
@@ -3138,46 +3058,68 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
   "name":
-    "description": |-
-      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
+    "description": |
+      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.
+      If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other
+      entities, it defaults to <entity-name>-<entity-version>.
   "scale_to_zero_enabled":
     "description": |-
       Whether the compute resources for the served entity should scale down to zero.
   "workload_size":
-    "description": |-
-      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
+    "description": |
+      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.
+      A single unit of provisioned concurrency can process one request at a time.
+      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
+      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
   "workload_type":
-    "description": |-
-      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |
+      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
+      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
+      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
   "environment_vars":
-    "description": |-
-      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
+    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
   "instance_profile_arn":
     "description": |-
-      ARN of the instance profile that the served entity uses to access AWS resources.
+      ARN of the instance profile that the served model will use to access AWS resources.
   "max_provisioned_throughput":
     "description": |-
       The maximum tokens per second that the endpoint can scale up to.
   "min_provisioned_throughput":
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
-  "model_name": {}
-  "model_version": {}
-  "name":
+  "model_name":
+    "description": |
+      The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,
+      in the form of __catalog_name__.__schema_name__.__model_name__.
+  "model_version":
     "description": |-
-      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
+      The version of the model in Databricks Model Registry or Unity Catalog to be served.
+  "name":
+    "description": |
+      The name of a served model. It must be unique across an endpoint. If not specified, this field will default to <model-name>-<model-version>.
+      A served model name can consist of alphanumeric characters, dashes, and underscores.
   "scale_to_zero_enabled":
     "description": |-
-      Whether the compute resources for the served entity should scale down to zero.
+      Whether the compute resources for the served model should scale down to zero.
   "workload_size":
-    "description": |-
-      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
+    "description": |
+      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
+      A single unit of provisioned concurrency can process one request at a time.
+      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
+      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
   "workload_type":
-    "description": |-
-      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |
+      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
+      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
+      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize:
   "_":
+    "description": |
+      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
+      A single unit of provisioned concurrency can process one request at a time.
+      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
+      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
     "enum":
       - |-
         Small
@@ -3187,26 +3129,17 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkload
         Large
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType:
   "_":
+    "description": |
+      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
+      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
+      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
     "enum":
       - |-
         CPU
-      - |-
-        GPU_MEDIUM
       - |-
         GPU_SMALL
-      - |-
-        GPU_LARGE
-      - |-
-        MULTIGPU_MEDIUM
-github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType:
-  "_":
-    "enum":
-      - |-
-        CPU
       - |-
         GPU_MEDIUM
-      - |-
-        GPU_SMALL
       - |-
         GPU_LARGE
       - |-
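The served-model annotations above describe how `model_name` (a registry name or a full Unity Catalog `catalog.schema.model` name), `model_version`, `workload_size`, `workload_type`, and `scale_to_zero_enabled` fit together. A minimal sketch of a bundle resource using these fields, with a hypothetical resource key, endpoint name, and model name:

```yaml
# Illustrative sketch only; resource key, endpoint name, and model name are hypothetical.
resources:
  model_serving_endpoints:
    example_endpoint:
      name: example-endpoint
      config:
        served_models:
          - model_name: main.default.example_model   # full UC name: catalog.schema.model
            model_version: "1"
            workload_size: Small        # Small | Medium | Large
            workload_type: GPU_SMALL    # defaults to "CPU" when omitted
            scale_to_zero_enabled: true
```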
diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index 323432fa3..120a12543 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -197,14 +197,3 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
   "manual":
     "description": |-
       PLACEHOLDER
-github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
-  "entity_version":
-    "description": |-
-      PLACEHOLDER
-github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
-  "model_name":
-    "description": |-
-      PLACEHOLDER
-  "model_version":
-    "description": |-
-      PLACEHOLDER
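The removed override entries follow the shape used throughout this file: a Go SDK type path keyed to per-field description overrides. A minimal sketch of one such entry, with hypothetical wording:

```yaml
# Hypothetical override entry; the key mirrors the Go SDK package path and field name.
github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
  "model_name":
    "description": |-
      Full name of the model to serve, e.g. catalog.schema.model for Unity Catalog models.
```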
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index 17a621ba0..4a3b56814 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -546,7 +546,7 @@
                     "type": "object",
                     "properties": {
                       "ai_gateway": {
-                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.",
+                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.",
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig"
                       },
                       "config": {
@@ -554,7 +554,7 @@
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput"
                       },
                       "name": {
-                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.",
+                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.\n",
                         "$ref": "#/$defs/string"
                       },
                       "permissions": {
@@ -575,6 +575,7 @@
                     },
                     "additionalProperties": false,
                     "required": [
+                      "config",
                       "name"
                     ]
                   },
@@ -4141,10 +4142,6 @@
                     "parameters": {
                       "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
                       "$ref": "#/$defs/slice/string"
-                    },
-                    "run_as_repl": {
-                      "description": "Deprecated. A value of `false` is no longer supported.",
-                      "$ref": "#/$defs/bool"
                     }
                   },
                   "additionalProperties": false
@@ -5505,11 +5502,11 @@
                   "type": "object",
                   "properties": {
                     "ai21labs_api_key": {
-                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "ai21labs_api_key_plaintext": {
-                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5531,7 +5528,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails"
                     },
                     "inference_table_config": {
-                      "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
+                      "description": "Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig"
                     },
                     "rate_limits": {
@@ -5539,7 +5536,7 @@
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit"
                     },
                     "usage_tracking_config": {
-                      "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.",
+                      "description": "Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig"
                     }
                   },
@@ -5557,7 +5554,7 @@
                   "type": "object",
                   "properties": {
                     "invalid_keywords": {
-                      "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
+                      "description": "List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
                       "$ref": "#/$defs/slice/string"
                     },
                     "pii": {
@@ -5569,7 +5566,7 @@
                       "$ref": "#/$defs/bool"
                     },
                     "valid_topics": {
-                      "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
+                      "description": "The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
                       "$ref": "#/$defs/slice/string"
                     }
                   },
@@ -5587,11 +5584,14 @@
                   "type": "object",
                   "properties": {
                     "behavior": {
-                      "description": "Configuration for input guardrail filters.",
+                      "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior"
                     }
                   },
-                  "additionalProperties": false
+                  "additionalProperties": false,
+                  "required": [
+                    "behavior"
+                  ]
                 },
                 {
                   "type": "string",
@@ -5603,6 +5603,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
                   "enum": [
                     "NONE",
                     "BLOCK"
@@ -5642,7 +5643,7 @@
                   "type": "object",
                   "properties": {
                     "catalog_name": {
-                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.",
+                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.",
                       "$ref": "#/$defs/string"
                     },
                     "enabled": {
@@ -5650,11 +5651,11 @@
                       "$ref": "#/$defs/bool"
                     },
                     "schema_name": {
-                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.",
+                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.",
                       "$ref": "#/$defs/string"
                     },
                     "table_name_prefix": {
-                      "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.",
+                      "description": "The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5673,10 +5674,10 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int64"
+                      "$ref": "#/$defs/int"
                     },
                     "key": {
-                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.",
+                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey"
                     },
                     "renewal_period": {
@@ -5700,6 +5701,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -5715,6 +5717,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -5749,11 +5752,11 @@
                   "type": "object",
                   "properties": {
                     "aws_access_key_id": {
-                      "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_access_key_id_plaintext": {
-                      "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
+                      "description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_region": {
@@ -5761,15 +5764,15 @@
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key": {
-                      "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key_plaintext": {
-                      "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
+                      "description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "bedrock_provider": {
-                      "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
+                      "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider"
                     }
                   },
@@ -5789,6 +5792,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                   "enum": [
                     "anthropic",
                     "cohere",
@@ -5808,11 +5812,11 @@
                   "type": "object",
                   "properties": {
                     "anthropic_api_key": {
-                      "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "anthropic_api_key_plaintext": {
-                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5860,15 +5864,15 @@
                   "type": "object",
                   "properties": {
                     "cohere_api_base": {
-                      "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.",
+                      "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key": {
-                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key_plaintext": {
-                      "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.",
+                      "description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5886,15 +5890,15 @@
                   "type": "object",
                   "properties": {
                     "databricks_api_token": {
-                      "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
+                      "description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_api_token_plaintext": {
-                      "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
+                      "description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_workspace_url": {
-                      "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.",
+                      "description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5915,19 +5919,19 @@
                   "type": "object",
                   "properties": {
                     "auto_capture_config": {
-                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.",
+                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput"
                     },
                     "served_entities": {
-                      "description": "The list of served entities under the serving endpoint config.",
+                      "description": "A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput"
                     },
                     "served_models": {
-                      "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.",
+                      "description": "(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput"
                     },
                     "traffic_config": {
-                      "description": "The traffic configuration associated with the serving endpoint config.",
+                      "description": "The traffic config defining how invocations to the serving endpoint should be routed.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig"
                     }
                   },
@@ -6006,7 +6010,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig"
                     },
                     "provider": {
-                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
+                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider"
                     },
                     "task": {
@@ -6031,6 +6035,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
                   "enum": [
                     "ai21labs",
                     "anthropic",
@@ -6054,27 +6059,23 @@
                   "type": "object",
                   "properties": {
                     "private_key": {
-                      "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
+                      "description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`",
                       "$ref": "#/$defs/string"
                     },
                     "private_key_plaintext": {
-                      "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
+                      "description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "project_id": {
-                      "description": "This is the Google Cloud project id that the service account is\nassociated with.",
+                      "description": "This is the Google Cloud project id that the service account is associated with.",
                       "$ref": "#/$defs/string"
                     },
                     "region": {
-                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations",
+                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.",
                       "$ref": "#/$defs/string"
                     }
                   },
-                  "additionalProperties": false,
-                  "required": [
-                    "project_id",
-                    "region"
-                  ]
+                  "additionalProperties": false
                 },
                 {
                   "type": "string",
@@ -6086,50 +6087,49 @@
               "oneOf": [
                 {
                   "type": "object",
-                  "description": "Configs needed to create an OpenAI model route.",
                   "properties": {
                     "microsoft_entra_client_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret": {
-                      "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
+                      "description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret_plaintext": {
-                      "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
+                      "description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_tenant_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_base": {
-                      "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.",
+                      "description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key": {
-                      "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key_plaintext": {
-                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_type": {
-                      "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.",
+                      "description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_version": {
-                      "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.",
+                      "description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n",
                       "$ref": "#/$defs/string"
                     },
                     "openai_deployment_name": {
-                      "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.",
+                      "description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n",
                       "$ref": "#/$defs/string"
                     },
                     "openai_organization": {
-                      "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.",
+                      "description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6147,11 +6147,11 @@
                   "type": "object",
                   "properties": {
                     "palm_api_key": {
-                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "palm_api_key_plaintext": {
-                      "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
+                      "description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6170,7 +6170,7 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int64"
+                      "$ref": "#/$defs/int"
                     },
                     "key": {
                       "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
@@ -6197,6 +6197,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -6212,6 +6213,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -6254,18 +6256,19 @@
                   "type": "object",
                   "properties": {
                     "entity_name": {
-                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.",
+                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),\nor a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of\n__catalog_name__.__schema_name__.__model_name__.\n",
                       "$ref": "#/$defs/string"
                     },
                     "entity_version": {
+                      "description": "The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.",
                       "$ref": "#/$defs/string"
                     },
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "external_model": {
-                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.",
+                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel"
                     },
                     "instance_profile_arn": {
@@ -6281,7 +6284,7 @@
                       "$ref": "#/$defs/int"
                     },
                     "name": {
-                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
+                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.\nIf not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other\nentities, it defaults to \u003centity-name\u003e-\u003centity-version\u003e.\n",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
@@ -6289,12 +6292,12 @@
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
+                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.\n",
                       "$ref": "#/$defs/string"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
-                      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType"
+                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
+                      "$ref": "#/$defs/string"
                     }
                   },
                   "additionalProperties": false
@@ -6311,11 +6314,11 @@
                   "type": "object",
                   "properties": {
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "instance_profile_arn": {
-                      "description": "ARN of the instance profile that the served entity uses to access AWS resources.",
+                      "description": "ARN of the instance profile that the served model will use to access AWS resources.",
                       "$ref": "#/$defs/string"
                     },
                     "max_provisioned_throughput": {
@@ -6327,25 +6330,27 @@
                       "$ref": "#/$defs/int"
                     },
                     "model_name": {
+                      "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n",
                       "$ref": "#/$defs/string"
                     },
                     "model_version": {
+                      "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served.",
                       "$ref": "#/$defs/string"
                     },
                     "name": {
-                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
+                      "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to \u003cmodel-name\u003e-\u003cmodel-version\u003e.\nA served model name can consist of alphanumeric characters, dashes, and underscores.\n",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
-                      "description": "Whether the compute resources for the served entity should scale down to zero.",
+                      "description": "Whether the compute resources for the served model should scale down to zero.",
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
+                      "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
+                      "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType"
                     }
                   },
@@ -6366,6 +6371,7 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
                   "enum": [
                     "Small",
                     "Medium",
@@ -6382,28 +6388,11 @@
               "oneOf": [
                 {
                   "type": "string",
+                  "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
                   "enum": [
                     "CPU",
-                    "GPU_MEDIUM",
                     "GPU_SMALL",
-                    "GPU_LARGE",
-                    "MULTIGPU_MEDIUM"
-                  ]
-                },
-                {
-                  "type": "string",
-                  "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
-                }
-              ]
-            },
-            "serving.ServingModelWorkloadType": {
-              "oneOf": [
-                {
-                  "type": "string",
-                  "enum": [
-                    "CPU",
                     "GPU_MEDIUM",
-                    "GPU_SMALL",
                     "GPU_LARGE",
                     "MULTIGPU_MEDIUM"
                   ]
diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go
index 43e458bc6..1eec1018e 100755
--- a/cmd/account/custom-app-integration/custom-app-integration.go
+++ b/cmd/account/custom-app-integration/custom-app-integration.go
@@ -307,7 +307,6 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: array: redirect_urls
-	// TODO: array: scopes
 	// TODO: complex arg: token_access_policy
 
 	cmd.Use = "update INTEGRATION_ID"
diff --git a/cmd/api/api.go b/cmd/api/api.go
index fad8a026f..c3a3eb0b6 100644
--- a/cmd/api/api.go
+++ b/cmd/api/api.go
@@ -62,7 +62,7 @@ func makeCommand(method string) *cobra.Command {
 
 			var response any
 			headers := map[string]string{"Content-Type": "application/json"}
-			err = api.Do(cmd.Context(), method, path, headers, nil, request, &response)
+			err = api.Do(cmd.Context(), method, path, headers, request, &response)
 			if err != nil {
 				return err
 			}
diff --git a/cmd/workspace/access-control/access-control.go b/cmd/workspace/access-control/access-control.go
deleted file mode 100755
index 7668265fb..000000000
--- a/cmd/workspace/access-control/access-control.go
+++ /dev/null
@@ -1,109 +0,0 @@
-// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-package access_control
-
-import (
-	"fmt"
-
-	"github.com/databricks/cli/cmd/root"
-	"github.com/databricks/cli/libs/cmdio"
-	"github.com/databricks/cli/libs/flags"
-	"github.com/databricks/databricks-sdk-go/service/iam"
-	"github.com/spf13/cobra"
-)
-
-// Slice with functions to override default command behavior.
-// Functions can be added from the `init()` function in manually curated files in this directory.
-var cmdOverrides []func(*cobra.Command)
-
-func New() *cobra.Command {
-	cmd := &cobra.Command{
-		Use:     "access-control",
-		Short:   `Rule based Access Control for Databricks Resources.`,
-		Long:    `Rule based Access Control for Databricks Resources.`,
-		GroupID: "iam",
-		Annotations: map[string]string{
-			"package": "iam",
-		},
-
-		// This service is being previewed; hide from help output.
-		Hidden: true,
-	}
-
-	// Add methods
-	cmd.AddCommand(newCheckPolicy())
-
-	// Apply optional overrides to this command.
-	for _, fn := range cmdOverrides {
-		fn(cmd)
-	}
-
-	return cmd
-}
-
-// start check-policy command
-
-// Slice with functions to override default command behavior.
-// Functions can be added from the `init()` function in manually curated files in this directory.
-var checkPolicyOverrides []func(
-	*cobra.Command,
-	*iam.CheckPolicyRequest,
-)
-
-func newCheckPolicy() *cobra.Command {
-	cmd := &cobra.Command{}
-
-	var checkPolicyReq iam.CheckPolicyRequest
-	var checkPolicyJson flags.JsonFlag
-
-	// TODO: short flags
-	cmd.Flags().Var(&checkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`)
-
-	// TODO: complex arg: resource_info
-
-	cmd.Use = "check-policy"
-	cmd.Short = `Check access policy to a resource.`
-	cmd.Long = `Check access policy to a resource.`
-
-	cmd.Annotations = make(map[string]string)
-
-	cmd.PreRunE = root.MustWorkspaceClient
-	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
-		ctx := cmd.Context()
-		w := root.WorkspaceClient(ctx)
-
-		if cmd.Flags().Changed("json") {
-			diags := checkPolicyJson.Unmarshal(&checkPolicyReq)
-			if diags.HasError() {
-				return diags.Error()
-			}
-			if len(diags) > 0 {
-				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
-				if err != nil {
-					return err
-				}
-			}
-		} else {
-			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
-		}
-
-		response, err := w.AccessControl.CheckPolicy(ctx, checkPolicyReq)
-		if err != nil {
-			return err
-		}
-		return cmdio.Render(ctx, response)
-	}
-
-	// Disable completions since they are not applicable.
-	// Can be overridden by manual implementation in `override.go`.
-	cmd.ValidArgsFunction = cobra.NoFileCompletions
-
-	// Apply optional overrides to this command.
-	for _, fn := range checkPolicyOverrides {
-		fn(cmd, &checkPolicyReq)
-	}
-
-	return cmd
-}
-
-// end service AccessControl
diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go
index c447bd736..f07d0cf76 100755
--- a/cmd/workspace/cmd.go
+++ b/cmd/workspace/cmd.go
@@ -3,7 +3,6 @@
 package workspace
 
 import (
-	access_control "github.com/databricks/cli/cmd/workspace/access-control"
 	alerts "github.com/databricks/cli/cmd/workspace/alerts"
 	alerts_legacy "github.com/databricks/cli/cmd/workspace/alerts-legacy"
 	apps "github.com/databricks/cli/cmd/workspace/apps"
@@ -97,7 +96,6 @@ import (
 func All() []*cobra.Command {
 	var out []*cobra.Command
 
-	out = append(out, access_control.New())
 	out = append(out, alerts.New())
 	out = append(out, alerts_legacy.New())
 	out = append(out, apps.New())
diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go
index 4d6262cff..504beac5e 100755
--- a/cmd/workspace/providers/providers.go
+++ b/cmd/workspace/providers/providers.go
@@ -64,7 +64,7 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`)
-	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
+	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
 
 	cmd.Use = "create NAME AUTHENTICATION_TYPE"
 	cmd.Short = `Create an auth provider.`
@@ -430,7 +430,7 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`)
 	cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the provider.`)
 	cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`)
-	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
+	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
 
 	cmd.Use = "update NAME"
 	cmd.Short = `Update a provider.`
diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go
index 6d6ce42f1..56abd2014 100755
--- a/cmd/workspace/recipients/recipients.go
+++ b/cmd/workspace/recipients/recipients.go
@@ -91,7 +91,7 @@ func newCreate() *cobra.Command {
 	cmd.Long = `Create a share recipient.
   
   Creates a new recipient with the delta sharing authentication type in the
-  metastore. The caller must be a metastore admin or have the
+  metastore. The caller must be a metastore admin or has the
   **CREATE_RECIPIENT** privilege on the metastore.
 
   Arguments:
@@ -186,16 +186,28 @@ func newDelete() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		check := root.ExactArgs(1)
-		return check(cmd, args)
-	}
-
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
+		if len(args) == 0 {
+			promptSpinner := cmdio.Spinner(ctx)
+			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
+			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
+			close(promptSpinner)
+			if err != nil {
+				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
+			}
+			id, err := cmdio.Select(ctx, names, "Name of the recipient")
+			if err != nil {
+				return err
+			}
+			args = append(args, id)
+		}
+		if len(args) != 1 {
+			return fmt.Errorf("expected to have name of the recipient")
+		}
 		deleteReq.Name = args[0]
 
 		err = w.Recipients.Delete(ctx, deleteReq)
@@ -246,16 +258,28 @@ func newGet() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		check := root.ExactArgs(1)
-		return check(cmd, args)
-	}
-
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
+		if len(args) == 0 {
+			promptSpinner := cmdio.Spinner(ctx)
+			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
+			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
+			close(promptSpinner)
+			if err != nil {
+				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
+			}
+			id, err := cmdio.Select(ctx, names, "Name of the recipient")
+			if err != nil {
+				return err
+			}
+			args = append(args, id)
+		}
+		if len(args) != 1 {
+			return fmt.Errorf("expected to have name of the recipient")
+		}
 		getReq.Name = args[0]
 
 		response, err := w.Recipients.Get(ctx, getReq)
@@ -360,7 +384,7 @@ func newRotateToken() *cobra.Command {
   the provided token info. The caller must be the owner of the recipient.
 
   Arguments:
-    NAME: The name of the Recipient.
+    NAME: The name of the recipient.
     EXISTING_TOKEN_EXPIRE_IN_SECONDS: The expiration time of the bearer token in ISO 8601 format. This will set
       the expiration_time of existing token only to a smaller timestamp, it
       cannot extend the expiration_time. Use 0 to expire the existing token
@@ -455,16 +479,28 @@ func newSharePermissions() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		check := root.ExactArgs(1)
-		return check(cmd, args)
-	}
-
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
+		if len(args) == 0 {
+			promptSpinner := cmdio.Spinner(ctx)
+			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
+			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
+			close(promptSpinner)
+			if err != nil {
+				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
+			}
+			id, err := cmdio.Select(ctx, names, "The name of the Recipient")
+			if err != nil {
+				return err
+			}
+			args = append(args, id)
+		}
+		if len(args) != 1 {
+			return fmt.Errorf("expected to have the name of the recipient")
+		}
 		sharePermissionsReq.Name = args[0]
 
 		response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq)
@@ -524,11 +560,6 @@ func newUpdate() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		check := root.ExactArgs(1)
-		return check(cmd, args)
-	}
-
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -546,13 +577,30 @@ func newUpdate() *cobra.Command {
 				}
 			}
 		}
+		if len(args) == 0 {
+			promptSpinner := cmdio.Spinner(ctx)
+			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
+			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
+			close(promptSpinner)
+			if err != nil {
+				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
+			}
+			id, err := cmdio.Select(ctx, names, "Name of the recipient")
+			if err != nil {
+				return err
+			}
+			args = append(args, id)
+		}
+		if len(args) != 1 {
+			return fmt.Errorf("expected to have name of the recipient")
+		}
 		updateReq.Name = args[0]
 
-		response, err := w.Recipients.Update(ctx, updateReq)
+		err = w.Recipients.Update(ctx, updateReq)
 		if err != nil {
 			return err
 		}
-		return cmdio.Render(ctx, response)
+		return nil
 	}
 
 	// Disable completions since they are not applicable.
diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go
index 034133623..cc99177c7 100755
--- a/cmd/workspace/serving-endpoints/serving-endpoints.go
+++ b/cmd/workspace/serving-endpoints/serving-endpoints.go
@@ -49,7 +49,6 @@ func New() *cobra.Command {
 	cmd.AddCommand(newGetOpenApi())
 	cmd.AddCommand(newGetPermissionLevels())
 	cmd.AddCommand(newGetPermissions())
-	cmd.AddCommand(newHttpRequest())
 	cmd.AddCommand(newList())
 	cmd.AddCommand(newLogs())
 	cmd.AddCommand(newPatch())
@@ -154,34 +153,16 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: complex arg: ai_gateway
-	// TODO: complex arg: config
 	// TODO: array: rate_limits
 	cmd.Flags().BoolVar(&createReq.RouteOptimized, "route-optimized", createReq.RouteOptimized, `Enable route optimization for the serving endpoint.`)
 	// TODO: array: tags
 
-	cmd.Use = "create NAME"
+	cmd.Use = "create"
 	cmd.Short = `Create a new serving endpoint.`
-	cmd.Long = `Create a new serving endpoint.
-
-  Arguments:
-    NAME: The name of the serving endpoint. This field is required and must be
-      unique across a Databricks workspace. An endpoint name can consist of
-      alphanumeric characters, dashes, and underscores.`
+	cmd.Long = `Create a new serving endpoint.`
 
 	cmd.Annotations = make(map[string]string)
 
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		if cmd.Flags().Changed("json") {
-			err := root.ExactArgs(0)(cmd, args)
-			if err != nil {
-				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input")
-			}
-			return nil
-		}
-		check := root.ExactArgs(1)
-		return check(cmd, args)
-	}
-
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -198,9 +179,8 @@ func newCreate() *cobra.Command {
 					return err
 				}
 			}
-		}
-		if !cmd.Flags().Changed("json") {
-			createReq.Name = args[0]
+		} else {
+			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 		}
 
 		wait, err := w.ServingEndpoints.Create(ctx, createReq)
@@ -253,7 +233,10 @@ func newDelete() *cobra.Command {
 
 	cmd.Use = "delete NAME"
 	cmd.Short = `Delete a serving endpoint.`
-	cmd.Long = `Delete a serving endpoint.`
+	cmd.Long = `Delete a serving endpoint.
+
+  Arguments:
+    NAME: The name of the serving endpoint. This field is required.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -449,12 +432,11 @@ func newGetOpenApi() *cobra.Command {
 
 		getOpenApiReq.Name = args[0]
 
-		response, err := w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
+		err = w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
 		if err != nil {
 			return err
 		}
-		defer response.Contents.Close()
-		return cmdio.Render(ctx, response.Contents)
+		return nil
 	}
 
 	// Disable completions since they are not applicable.
@@ -586,77 +568,6 @@ func newGetPermissions() *cobra.Command {
 	return cmd
 }
 
-// start http-request command
-
-// Slice with functions to override default command behavior.
-// Functions can be added from the `init()` function in manually curated files in this directory.
-var httpRequestOverrides []func(
-	*cobra.Command,
-	*serving.ExternalFunctionRequest,
-)
-
-func newHttpRequest() *cobra.Command {
-	cmd := &cobra.Command{}
-
-	var httpRequestReq serving.ExternalFunctionRequest
-
-	// TODO: short flags
-
-	cmd.Flags().StringVar(&httpRequestReq.Headers, "headers", httpRequestReq.Headers, `Additional headers for the request.`)
-	cmd.Flags().StringVar(&httpRequestReq.Json, "json", httpRequestReq.Json, `The JSON payload to send in the request body.`)
-	cmd.Flags().StringVar(&httpRequestReq.Params, "params", httpRequestReq.Params, `Query parameters for the request.`)
-
-	cmd.Use = "http-request CONNECTION_NAME METHOD PATH"
-	cmd.Short = `Make external services call using the credentials stored in UC Connection.`
-	cmd.Long = `Make external services call using the credentials stored in UC Connection.
-
-  Arguments:
-    CONNECTION_NAME: The connection name to use. This is required to identify the external
-      connection.
-    METHOD: The HTTP method to use (e.g., 'GET', 'POST').
-    PATH: The relative path for the API endpoint. This is required.`
-
-	// This command is being previewed; hide from help output.
-	cmd.Hidden = true
-
-	cmd.Annotations = make(map[string]string)
-
-	cmd.Args = func(cmd *cobra.Command, args []string) error {
-		check := root.ExactArgs(3)
-		return check(cmd, args)
-	}
-
-	cmd.PreRunE = root.MustWorkspaceClient
-	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
-		ctx := cmd.Context()
-		w := root.WorkspaceClient(ctx)
-
-		httpRequestReq.ConnectionName = args[0]
-		_, err = fmt.Sscan(args[1], &httpRequestReq.Method)
-		if err != nil {
-			return fmt.Errorf("invalid METHOD: %s", args[1])
-		}
-		httpRequestReq.Path = args[2]
-
-		response, err := w.ServingEndpoints.HttpRequest(ctx, httpRequestReq)
-		if err != nil {
-			return err
-		}
-		return cmdio.Render(ctx, response)
-	}
-
-	// Disable completions since they are not applicable.
-	// Can be overridden by manual implementation in `override.go`.
-	cmd.ValidArgsFunction = cobra.NoFileCompletions
-
-	// Apply optional overrides to this command.
-	for _, fn := range httpRequestOverrides {
-		fn(cmd, &httpRequestReq)
-	}
-
-	return cmd
-}
-
 // start list command
 
 // Slice with functions to override default command behavior.
@@ -938,7 +849,7 @@ func newPutAiGateway() *cobra.Command {
 	cmd.Long = `Update AI Gateway of a serving endpoint.
   
   Used to update the AI Gateway of a serving endpoint. NOTE: Only external model
-  and provisioned throughput endpoints are currently supported.
+  endpoints are currently supported.
 
   Arguments:
     NAME: The name of the serving endpoint whose AI Gateway is being updated. This
diff --git a/go.mod b/go.mod
index 4a3bf1620..0ef800d7b 100644
--- a/go.mod
+++ b/go.mod
@@ -7,7 +7,7 @@ toolchain go1.23.4
 require (
 	github.com/Masterminds/semver/v3 v3.3.1 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.56.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0
 	github.com/fatih/color v1.18.0 // MIT
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
 	github.com/hashicorp/go-version v1.7.0 // MPL 2.0
diff --git a/go.sum b/go.sum
index b4e92c2c9..b1364cb26 100644
--- a/go.sum
+++ b/go.sum
@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo=
 github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.56.0 h1:8BsqjrSLbm2ET+/SLCN8qD+v+HFvs891dzi1OaiyRfc=
-github.com/databricks/databricks-sdk-go v0.56.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
+github.com/databricks/databricks-sdk-go v0.55.0 h1:ReziD6spzTDltM0ml80LggKo27F3oUjgTinCFDJDnak=
+github.com/databricks/databricks-sdk-go v0.55.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
diff --git a/integration/cmd/sync/sync_test.go b/integration/cmd/sync/sync_test.go
index 88e6ed89a..632497054 100644
--- a/integration/cmd/sync/sync_test.go
+++ b/integration/cmd/sync/sync_test.go
@@ -158,7 +158,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath, expected
 
 	var res []byte
 	a.c.Eventually(func() bool {
-		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &res)
+		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res)
 		require.NoError(a.t, err)
 		actualContent := string(res)
 		return actualContent == expectedContent
diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go
index 7102b6e29..88bbadd32 100644
--- a/libs/filer/files_client.go
+++ b/libs/filer/files_client.go
@@ -148,7 +148,7 @@ func (w *FilesClient) Write(ctx context.Context, name string, reader io.Reader,
 	overwrite := slices.Contains(mode, OverwriteIfExists)
 	urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite)
 	headers := map[string]string{"Content-Type": "application/octet-stream"}
-	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, nil, reader, nil)
+	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -176,7 +176,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err
 	}
 
 	var reader io.ReadCloser
-	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &reader)
+	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &reader)
 
 	// Return early on success.
 	if err == nil {
diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go
index f7e1b3adb..8d5148edd 100644
--- a/libs/filer/workspace_files_client.go
+++ b/libs/filer/workspace_files_client.go
@@ -106,7 +106,7 @@ func (info *wsfsFileInfo) MarshalJSON() ([]byte, error) {
 // as an interface to allow for mocking in tests.
 type apiClient interface {
 	Do(ctx context.Context, method, path string,
-		headers map[string]string, queryParams map[string]any, request, response any,
+		headers map[string]string, request, response any,
 		visitors ...func(*http.Request) error) error
 }
 
@@ -156,7 +156,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io
 		return err
 	}
 
-	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, nil, body, nil)
+	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -341,7 +341,6 @@ func (w *WorkspaceFilesClient) Stat(ctx context.Context, name string) (fs.FileIn
 		http.MethodGet,
 		"/api/2.0/workspace/get-status",
 		nil,
-		nil,
 		map[string]string{
 			"path":               absPath,
 			"return_export_info": "true",
diff --git a/libs/filer/workspace_files_extensions_client_test.go b/libs/filer/workspace_files_extensions_client_test.go
index f9c65d6ee..9ea837fa9 100644
--- a/libs/filer/workspace_files_extensions_client_test.go
+++ b/libs/filer/workspace_files_extensions_client_test.go
@@ -17,7 +17,7 @@ type mockApiClient struct {
 }
 
 func (m *mockApiClient) Do(ctx context.Context, method, path string,
-	headers map[string]string, queryParams map[string]any, request, response any,
+	headers map[string]string, request, response any,
 	visitors ...func(*http.Request) error,
 ) error {
 	args := m.Called(ctx, method, path, headers, request, response, visitors)
diff --git a/libs/git/info.go b/libs/git/info.go
index dc4af9b6d..46e57be48 100644
--- a/libs/git/info.go
+++ b/libs/git/info.go
@@ -66,7 +66,6 @@ func fetchRepositoryInfoAPI(ctx context.Context, path string, w *databricks.Work
 		http.MethodGet,
 		apiEndpoint,
 		nil,
-		nil,
 		map[string]string{
 			"path":            path,
 			"return_git_info": "true",

From 8af9efaa621103308bd869662602b3724406c173 Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Thu, 23 Jan 2025 14:58:18 +0100
Subject: [PATCH 10/39] Show an error when non-yaml files used in include
 section (#2201)

## Changes
The `include` section is used only to include other bundle configuration
YAML files. If any other file type is listed there, raise an error and
point users to `sync.include` instead.
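
A minimal standalone sketch of the extension check this introduces (the
actual logic lives in bundle/config/loader/process_root_includes.go in
the diff below); the helper name is illustrative:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// isYAMLInclude reports whether a matched 'include' entry points to a
// YAML file. Illustrative helper mirroring the check added below.
func isYAMLInclude(rel string) bool {
	ext := filepath.Ext(rel)
	return ext == ".yaml" || ext == ".yml"
}

func main() {
	for _, rel := range []string{"resources/job.yml", "test.py"} {
		if isYAMLInclude(rel) {
			fmt.Printf("%s: ok\n", rel)
		} else {
			fmt.Printf("%s: not a YAML file; list it under 'sync.include' instead\n", rel)
		}
	}
}
```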

## Tests
Added acceptance test

---------

Co-authored-by: Julia Crawford (Databricks) <julia.crawford@databricks.com>
---
 .../includes/non_yaml_in_include/databricks.yml |  6 ++++++
 .../includes/non_yaml_in_include/output.txt     | 10 ++++++++++
 .../bundle/includes/non_yaml_in_include/script  |  1 +
 .../bundle/includes/non_yaml_in_include/test.py |  1 +
 bundle/config/loader/process_root_includes.go   | 17 ++++++++++++++++-
 5 files changed, 34 insertions(+), 1 deletion(-)
 create mode 100644 acceptance/bundle/includes/non_yaml_in_include/databricks.yml
 create mode 100644 acceptance/bundle/includes/non_yaml_in_include/output.txt
 create mode 100644 acceptance/bundle/includes/non_yaml_in_include/script
 create mode 100644 acceptance/bundle/includes/non_yaml_in_include/test.py

diff --git a/acceptance/bundle/includes/non_yaml_in_include/databricks.yml b/acceptance/bundle/includes/non_yaml_in_include/databricks.yml
new file mode 100644
index 000000000..162bd6013
--- /dev/null
+++ b/acceptance/bundle/includes/non_yaml_in_include/databricks.yml
@@ -0,0 +1,6 @@
+bundle:
+  name: non_yaml_in_includes
+
+include:
+ - test.py
+ - resources/*.yml
diff --git a/acceptance/bundle/includes/non_yaml_in_include/output.txt b/acceptance/bundle/includes/non_yaml_in_include/output.txt
new file mode 100644
index 000000000..6006ca14e
--- /dev/null
+++ b/acceptance/bundle/includes/non_yaml_in_include/output.txt
@@ -0,0 +1,10 @@
+Error: Files in the 'include' configuration section must be YAML files.
+  in databricks.yml:5:4
+
+The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
+
+Name: non_yaml_in_includes
+
+Found 1 error
+
+Exit code: 1
diff --git a/acceptance/bundle/includes/non_yaml_in_include/script b/acceptance/bundle/includes/non_yaml_in_include/script
new file mode 100644
index 000000000..72555b332
--- /dev/null
+++ b/acceptance/bundle/includes/non_yaml_in_include/script
@@ -0,0 +1 @@
+$CLI bundle validate
diff --git a/acceptance/bundle/includes/non_yaml_in_include/test.py b/acceptance/bundle/includes/non_yaml_in_include/test.py
new file mode 100644
index 000000000..44159b395
--- /dev/null
+++ b/acceptance/bundle/includes/non_yaml_in_include/test.py
@@ -0,0 +1 @@
+print("Hello world")
diff --git a/bundle/config/loader/process_root_includes.go b/bundle/config/loader/process_root_includes.go
index c608a3de6..198095742 100644
--- a/bundle/config/loader/process_root_includes.go
+++ b/bundle/config/loader/process_root_includes.go
@@ -2,6 +2,7 @@ package loader
 
 import (
 	"context"
+	"fmt"
 	"path/filepath"
 	"slices"
 	"strings"
@@ -36,6 +37,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 	// Maintain list of files in order of files being loaded.
 	// This is stored in the bundle configuration for observability.
 	var files []string
+	var diags diag.Diagnostics
 
 	// For each glob, find all files to load.
 	// Ordering of the list of globs is maintained in the output.
@@ -60,7 +62,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 
 		// Filter matches to ones we haven't seen yet.
 		var includes []string
-		for _, match := range matches {
+		for i, match := range matches {
 			rel, err := filepath.Rel(b.BundleRootPath, match)
 			if err != nil {
 				return diag.FromErr(err)
@@ -69,9 +71,22 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 				continue
 			}
 			seen[rel] = true
+			if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" {
+				diags = diags.Append(diag.Diagnostic{
+					Severity:  diag.Error,
+					Summary:   "Files in the 'include' configuration section must be YAML files.",
+					Detail:    fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
+					Locations: b.Config.GetLocations(fmt.Sprintf("include[%d]", i)),
+				})
+				continue
+			}
 			includes = append(includes, rel)
 		}
 
+		if len(diags) > 0 {
+			return diags
+		}
+
 		// Add matches to list of mutators to return.
 		slices.Sort(includes)
 		files = append(files, includes...)

From 0487e816cc8c20272d12cf2f0ddde85a8a258c74 Mon Sep 17 00:00:00 2001
From: Ilya Kuznetsov <ilya.kuznetsov@databricks.com>
Date: Thu, 23 Jan 2025 15:35:33 +0100
Subject: [PATCH 11/39] Reading variables from file (#2171)

## Changes

Adds a new source of default values for variables: the variable file
`.databricks/bundle/<target>/variable-overrides.json`.

The CLI stats and reads that file on every run during the variable
initialisation phase.
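
For illustration only, a minimal sketch of that lookup using the standard
encoding/json package instead of the CLI's internal jsonloader (see the
diff to bundle/config/mutator/set_variables.go below); a missing file is
treated as "no overrides":

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
)

// loadVariableOverrides reads .databricks/bundle/<target>/variable-overrides.json
// if it exists and requires it to be a JSON object mapping variable names
// to values. Sketch only; the CLI itself uses its internal jsonloader.
func loadVariableOverrides(bundleRoot, target string) (map[string]any, error) {
	path := filepath.Join(bundleRoot, ".databricks", "bundle", target, "variable-overrides.json")
	if _, err := os.Stat(path); err != nil {
		return nil, nil // missing file is not an error
	}
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("failed to read variables file: %w", err)
	}
	var overrides map[string]any
	if err := json.Unmarshal(data, &overrides); err != nil {
		return nil, fmt.Errorf("failed to parse variables file %s: %w", path, err)
	}
	return overrides, nil
}

func main() {
	overrides, err := loadVariableOverrides(".", "default")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("overrides:", overrides)
}
```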

## Tests

Acceptance tests
---
 acceptance/bundle/variables/empty/output.txt  |  2 +-
 .../bundle/variables/env_overrides/output.txt |  2 +-
 .../complex_to_string/variable-overrides.json |  5 ++
 .../bundle/default/variable-overrides.json    |  7 ++
 .../invalid_json/variable-overrides.json      |  1 +
 .../string_to_complex/variable-overrides.json |  3 +
 .../bundle/with_value/variable-overrides.json |  3 +
 .../without_defaults/variable-overrides.json  |  4 +
 .../variable-overrides.json                   |  3 +
 .../bundle/variables/file-defaults/.gitignore |  1 +
 .../variables/file-defaults/databricks.yml    | 53 ++++++++++++
 .../bundle/variables/file-defaults/output.txt | 82 +++++++++++++++++++
 .../bundle/variables/file-defaults/script     | 30 +++++++
 .../bundle/variables/vanilla/output.txt       |  2 +-
 acceptance/script.prepare                     |  5 ++
 bundle/config/mutator/set_variables.go        | 69 +++++++++++++++-
 bundle/config/mutator/set_variables_test.go   | 14 ++--
 bundle/config/variable/variable.go            | 11 +--
 18 files changed, 278 insertions(+), 19 deletions(-)
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/complex_to_string/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/default/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/invalid_json/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/string_to_complex/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/with_value/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/without_defaults/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.databricks/bundle/wrong_file_structure/variable-overrides.json
 create mode 100644 acceptance/bundle/variables/file-defaults/.gitignore
 create mode 100644 acceptance/bundle/variables/file-defaults/databricks.yml
 create mode 100644 acceptance/bundle/variables/file-defaults/output.txt
 create mode 100644 acceptance/bundle/variables/file-defaults/script

diff --git a/acceptance/bundle/variables/empty/output.txt b/acceptance/bundle/variables/empty/output.txt
index 261635920..8933443df 100644
--- a/acceptance/bundle/variables/empty/output.txt
+++ b/acceptance/bundle/variables/empty/output.txt
@@ -1,4 +1,4 @@
-Error: no value assigned to required variable a. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_a environment variable
+Error: no value assigned to required variable a. Assignment can be done using "--var", by setting the BUNDLE_VAR_a environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
 
 Name: empty${var.a}
 Target: default
diff --git a/acceptance/bundle/variables/env_overrides/output.txt b/acceptance/bundle/variables/env_overrides/output.txt
index f42f82211..1ee9ef625 100644
--- a/acceptance/bundle/variables/env_overrides/output.txt
+++ b/acceptance/bundle/variables/env_overrides/output.txt
@@ -9,7 +9,7 @@
 "prod-a env-var-b"
 
 >>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
-Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
+Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
 
 Name: test bundle
 Target: env-missing-a-required-variable-assignment
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/complex_to_string/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/complex_to_string/variable-overrides.json
new file mode 100644
index 000000000..602567a68
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/complex_to_string/variable-overrides.json
@@ -0,0 +1,5 @@
+{
+  "cluster_key": {
+    "node_type_id": "Standard_DS3_v2"
+  }
+}
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/default/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/default/variable-overrides.json
new file mode 100644
index 000000000..3a865e120
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/default/variable-overrides.json
@@ -0,0 +1,7 @@
+{
+  "cluster": {
+    "node_type_id": "Standard_DS3_v2"
+  },
+  "cluster_key": "mlops_stacks-cluster",
+  "cluster_workers": 2
+}
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/invalid_json/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/invalid_json/variable-overrides.json
new file mode 100644
index 000000000..257cc5642
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/invalid_json/variable-overrides.json
@@ -0,0 +1 @@
+foo
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/string_to_complex/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/string_to_complex/variable-overrides.json
new file mode 100644
index 000000000..1ea719446
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/string_to_complex/variable-overrides.json
@@ -0,0 +1,3 @@
+{
+  "cluster": "mlops_stacks-cluster"
+}
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/with_value/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/with_value/variable-overrides.json
new file mode 100644
index 000000000..686d68548
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/with_value/variable-overrides.json
@@ -0,0 +1,3 @@
+{
+  "cluster_key": "mlops_stacks-cluster-from-file"
+}
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/without_defaults/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/without_defaults/variable-overrides.json
new file mode 100644
index 000000000..86166408e
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/without_defaults/variable-overrides.json
@@ -0,0 +1,4 @@
+{
+  "cluster_key": "mlops_stacks-cluster",
+  "cluster_workers": 2
+}
diff --git a/acceptance/bundle/variables/file-defaults/.databricks/bundle/wrong_file_structure/variable-overrides.json b/acceptance/bundle/variables/file-defaults/.databricks/bundle/wrong_file_structure/variable-overrides.json
new file mode 100644
index 000000000..de140ba36
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.databricks/bundle/wrong_file_structure/variable-overrides.json
@@ -0,0 +1,3 @@
+[
+  "foo"
+]
diff --git a/acceptance/bundle/variables/file-defaults/.gitignore b/acceptance/bundle/variables/file-defaults/.gitignore
new file mode 100644
index 000000000..bd1711fd1
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/.gitignore
@@ -0,0 +1 @@
+!.databricks
diff --git a/acceptance/bundle/variables/file-defaults/databricks.yml b/acceptance/bundle/variables/file-defaults/databricks.yml
new file mode 100644
index 000000000..5838843e1
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/databricks.yml
@@ -0,0 +1,53 @@
+bundle:
+  name: TestResolveVariablesFromFile
+
+variables:
+  cluster:
+    type: "complex"
+  cluster_key:
+  cluster_workers:
+
+resources:
+  jobs:
+    job1:
+      job_clusters:
+        - job_cluster_key: ${var.cluster_key}
+          new_cluster:
+            node_type_id: "${var.cluster.node_type_id}"
+            num_workers: ${var.cluster_workers}
+
+targets:
+  default:
+    default: true
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  without_defaults:
+
+  complex_to_string:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  string_to_complex:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  wrong_file_structure:
+
+  invalid_json:
+
+  with_value:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: cluster_key_value
diff --git a/acceptance/bundle/variables/file-defaults/output.txt b/acceptance/bundle/variables/file-defaults/output.txt
new file mode 100644
index 000000000..73830aae3
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/output.txt
@@ -0,0 +1,82 @@
+
+=== variable file
+>>> $CLI bundle validate -o json
+{
+  "job_cluster_key": "mlops_stacks-cluster",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable file and variable flag
+>>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
+{
+  "job_cluster_key": "mlops_stacks-cluster-overriden",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable file and environment variable
+>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
+{
+  "job_cluster_key": "mlops_stacks-cluster-overriden",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable has value in config file
+>>> $CLI bundle validate -o json --target with_value
+{
+  "job_cluster_key": "mlops_stacks-cluster-from-file",
+  "new_cluster": {
+    "node_type_id": "default",
+    "num_workers": 1
+  }
+}
+
+=== file has variable that is complex but default is string
+>>> errcode $CLI bundle validate -o json --target complex_to_string
+Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== file has variable that is string but default is complex
+>>> errcode $CLI bundle validate -o json --target string_to_complex
+Error: variable cluster is of type complex, but the value in the variable file is not a complex type
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== variable is required but it's not provided in the file
+>>> errcode $CLI bundle validate -o json --target without_defaults
+Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
diff --git a/acceptance/bundle/variables/file-defaults/script b/acceptance/bundle/variables/file-defaults/script
new file mode 100644
index 000000000..c5b208755
--- /dev/null
+++ b/acceptance/bundle/variables/file-defaults/script
@@ -0,0 +1,30 @@
+cluster_expr=".resources.jobs.job1.job_clusters[0]"
+
+# defaults from variable file, see .databricks/bundle/<target>/variable-overrides.json
+
+title "variable file"
+trace $CLI bundle validate -o json | jq $cluster_expr
+
+title "variable file and variable flag"
+trace $CLI bundle validate -o json --var="cluster_key=mlops_stacks-cluster-overriden" | jq $cluster_expr
+
+title "variable file and environment variable"
+trace BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json | jq $cluster_expr
+
+title "variable has value in config file"
+trace $CLI bundle validate -o json --target with_value | jq $cluster_expr
+
+# title "file cannot be parsed"
+# trace errcode $CLI bundle validate -o json --target invalid_json | jq $cluster_expr
+
+# title "file has wrong structure"
+# trace errcode $CLI bundle validate -o json --target wrong_file_structure | jq $cluster_expr
+
+title "file has variable that is complex but default is string"
+trace errcode $CLI bundle validate -o json --target complex_to_string | jq $cluster_expr
+
+title "file has variable that is string but default is complex"
+trace errcode $CLI bundle validate -o json --target string_to_complex | jq $cluster_expr
+
+title "variable is required but it's not provided in the file"
+trace errcode $CLI bundle validate -o json --target without_defaults | jq $cluster_expr
diff --git a/acceptance/bundle/variables/vanilla/output.txt b/acceptance/bundle/variables/vanilla/output.txt
index 1d88bd060..e98882bb0 100644
--- a/acceptance/bundle/variables/vanilla/output.txt
+++ b/acceptance/bundle/variables/vanilla/output.txt
@@ -3,7 +3,7 @@
 "abc def"
 
 >>> errcode $CLI bundle validate
-Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
+Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
 
 Name: ${var.a} ${var.b}
 Target: default
diff --git a/acceptance/script.prepare b/acceptance/script.prepare
index 5900016d7..0567e433a 100644
--- a/acceptance/script.prepare
+++ b/acceptance/script.prepare
@@ -40,3 +40,8 @@ git-repo-init() {
     git add databricks.yml
     git commit -qm 'Add databricks.yml'
 }
+
+title() {
+    local label="$1"
+    printf "\n=== %s" "$label"
+}
diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go
index 9e9f2dcfe..ac2f660a9 100644
--- a/bundle/config/mutator/set_variables.go
+++ b/bundle/config/mutator/set_variables.go
@@ -3,11 +3,14 @@ package mutator
 import (
 	"context"
 	"fmt"
+	"os"
+	"path/filepath"
 
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config/variable"
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/jsonloader"
 	"github.com/databricks/cli/libs/env"
 )
 
@@ -23,7 +26,11 @@ func (m *setVariables) Name() string {
 	return "SetVariables"
 }
 
-func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string) (dyn.Value, error) {
+func getDefaultVariableFilePath(target string) string {
+	return ".databricks/bundle/" + target + "/variable-overrides.json"
+}
+
+func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string, fileDefault dyn.Value) (dyn.Value, error) {
 	// case: variable already has value initialized, so skip
 	if variable.HasValue() {
 		return v, nil
@@ -49,6 +56,26 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
 		return v, nil
 	}
 
+	// case: Set the variable to the default value from the variable file
+	if fileDefault.Kind() != dyn.KindInvalid && fileDefault.Kind() != dyn.KindNil {
+		hasComplexType := variable.IsComplex()
+		hasComplexValue := fileDefault.Kind() == dyn.KindMap || fileDefault.Kind() == dyn.KindSequence
+
+		if hasComplexType && !hasComplexValue {
+			return dyn.InvalidValue, fmt.Errorf(`variable %s is of type complex, but the value in the variable file is not a complex type`, name)
+		}
+		if !hasComplexType && hasComplexValue {
+			return dyn.InvalidValue, fmt.Errorf(`variable %s is not of type complex, but the value in the variable file is a complex type`, name)
+		}
+
+		v, err := dyn.Set(v, "value", fileDefault)
+		if err != nil {
+			return dyn.InvalidValue, fmt.Errorf(`failed to assign default value from variable file to variable %s with error: %v`, name, err)
+		}
+
+		return v, nil
+	}
+
 	// case: Set the variable to its default value
 	if variable.HasDefault() {
 		vDefault, err := dyn.Get(v, "default")
@@ -64,10 +91,43 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
 	}
 
 	// We should have had a value to set for the variable at this point.
-	return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
+	return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done using "--var", by setting the %s environment variable, or in %s file`, name, bundleVarPrefix+name, getDefaultVariableFilePath("<target>"))
+}
+
+func readVariablesFromFile(b *bundle.Bundle) (dyn.Value, diag.Diagnostics) {
+	var diags diag.Diagnostics
+
+	filePath := filepath.Join(b.BundleRootPath, getDefaultVariableFilePath(b.Config.Bundle.Target))
+	if _, err := os.Stat(filePath); err != nil {
+		return dyn.InvalidValue, nil
+	}
+
+	f, err := os.ReadFile(filePath)
+	if err != nil {
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to read variables file: %w", err))
+	}
+
+	val, err := jsonloader.LoadJSON(f, filePath)
+	if err != nil {
+		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse variables file %s: %w", filePath, err))
+	}
+
+	if val.Kind() != dyn.KindMap {
+		return dyn.InvalidValue, diags.Append(diag.Diagnostic{
+			Severity: diag.Error,
+			Summary:  fmt.Sprintf("failed to parse variables file %s: invalid format", filePath),
+			Detail:   "Variables file must be a JSON object with the following format:\n{\"var1\": \"value1\", \"var2\": \"value2\"}",
+		})
+	}
+
+	return val, nil
 }
 
 func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	defaults, diags := readVariablesFromFile(b)
+	if diags.HasError() {
+		return diags
+	}
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
 		return dyn.Map(v, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
 			name := p[1].Key()
@@ -76,9 +136,10 @@ func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
 				return dyn.InvalidValue, fmt.Errorf(`variable "%s" is not defined`, name)
 			}
 
-			return setVariable(ctx, variable, v, name)
+			fileDefault, _ := dyn.Get(defaults, name)
+			return setVariable(ctx, variable, v, name, fileDefault)
 		}))
 	})
 
-	return diag.FromErr(err)
+	return diags.Extend(diag.FromErr(err))
 }
diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go
index 07a5c8214..d904d5be3 100644
--- a/bundle/config/mutator/set_variables_test.go
+++ b/bundle/config/mutator/set_variables_test.go
@@ -25,7 +25,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	v, err = setVariable(context.Background(), v, &variable, "foo")
+	v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
 	require.NoError(t, err)
 
 	err = convert.ToTyped(&variable, v)
@@ -43,7 +43,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	v, err = setVariable(context.Background(), v, &variable, "foo")
+	v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
 	require.NoError(t, err)
 
 	err = convert.ToTyped(&variable, v)
@@ -65,7 +65,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	v, err = setVariable(context.Background(), v, &variable, "foo")
+	v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
 	require.NoError(t, err)
 
 	err = convert.ToTyped(&variable, v)
@@ -90,7 +90,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	v, err = setVariable(context.Background(), v, &variable, "foo")
+	v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
 	require.NoError(t, err)
 
 	err = convert.ToTyped(&variable, v)
@@ -107,8 +107,8 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	_, err = setVariable(context.Background(), v, &variable, "foo")
-	assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable")
+	_, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
+	assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done using \"--var\", by setting the BUNDLE_VAR_foo environment variable, or in .databricks/bundle/<target>/variable-overrides.json file")
 }
 
 func TestSetVariablesMutator(t *testing.T) {
@@ -157,6 +157,6 @@ func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) {
 	v, err := convert.FromTyped(variable, dyn.NilValue)
 	require.NoError(t, err)
 
-	_, err = setVariable(context.Background(), v, &variable, "foo")
+	_, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
 	assert.ErrorContains(t, err, "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo")
 }
diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go
index 95a68cfeb..d7f1cdede 100644
--- a/bundle/config/variable/variable.go
+++ b/bundle/config/variable/variable.go
@@ -36,11 +36,12 @@ type Variable struct {
 	// This field stores the resolved value for the variable. The variable are
 	// resolved in the following priority order (from highest to lowest)
 	//
-	// 1. Command line flag. For example: `--var="foo=bar"`
-	// 2. Target variable. eg: BUNDLE_VAR_foo=bar
-	// 3. Default value as defined in the applicable environments block
-	// 4. Default value defined in variable definition
-	// 5. Throw error, since if no default value is defined, then the variable
+	// 1. Command line flag `--var="foo=bar"`
+	// 2. Environment variable. eg: BUNDLE_VAR_foo=bar
+	// 3. Load defaults from .databricks/bundle/<target>/variable-overrides.json
+	// 4. Default value as defined in the applicable targets block
+	// 5. Default value defined in variable definition
+	// 6. Throw error, since if no default value is defined, then the variable
 	//    is required
 	Value VariableValue `json:"value,omitempty" bundle:"readonly"`
 

From d784147e994f71ea7b4e30a02daea66e73baea10 Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Thu, 23 Jan 2025 16:54:55 +0100
Subject: [PATCH 12/39] [Release] Release v0.239.1 (#2218)

CLI:
* Added text output templates for apps list and list-deployments
([#2175](https://github.com/databricks/cli/pull/2175)).
* Fix duplicate "apps" entry in help output
([#2191](https://github.com/databricks/cli/pull/2191)).

Bundles:
* Allow yaml-anchors in schema
([#2200](https://github.com/databricks/cli/pull/2200)).
* Show an error when non-yaml files used in include section
([#2201](https://github.com/databricks/cli/pull/2201)).
* Set WorktreeRoot to sync root outside git repo
([#2197](https://github.com/databricks/cli/pull/2197)).
* fix: Detailed message for using source-linked deployment with
file_path specified
([#2119](https://github.com/databricks/cli/pull/2119)).
* Allow using variables in enum fields
([#2199](https://github.com/databricks/cli/pull/2199)).
* Add experimental-jobs-as-code template
([#2177](https://github.com/databricks/cli/pull/2177)).
* Reading variables from file
([#2171](https://github.com/databricks/cli/pull/2171)).
* Fixed an apps message order and added output test
([#2174](https://github.com/databricks/cli/pull/2174)).
* Default to forward slash-separated paths for path translation
([#2145](https://github.com/databricks/cli/pull/2145)).
* Include a materialized copy of built-in templates
([#2146](https://github.com/databricks/cli/pull/2146)).
---
 CHANGELOG.md | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 53392e5db..255bfb0a8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,25 @@
 # Version changelog
 
+## [Release] Release v0.239.1
+
+CLI:
+ * Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
+ * Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
+
+Bundles:
+ * Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
+ * Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
+ * Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
+ * fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
+ * Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
+ * Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
+ * Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
+ * Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
+ * Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
+ * Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
+
+
+
 ## [Release] Release v0.239.0
 
 ### New feature announcement

From d6d9b994d46bdd11bdc17f215a4138558b626457 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Fri, 24 Jan 2025 10:47:12 +0100
Subject: [PATCH 13/39] acc: only print non-zero exit codes in errcode function
 (#2222)

This reduces noise in the output and matches how "Exit code" is handled
for the whole script.
---
 acceptance/bundle/paths/fallback/output.txt       | 2 --
 acceptance/bundle/paths/nominal/output.txt        | 2 --
 acceptance/bundle/variables/arg-repeat/output.txt | 2 --
 acceptance/script.prepare                         | 4 +++-
 4 files changed, 3 insertions(+), 7 deletions(-)

diff --git a/acceptance/bundle/paths/fallback/output.txt b/acceptance/bundle/paths/fallback/output.txt
index f694610d2..63121f3d7 100644
--- a/acceptance/bundle/paths/fallback/output.txt
+++ b/acceptance/bundle/paths/fallback/output.txt
@@ -1,8 +1,6 @@
 
 >>> $CLI bundle validate -t development -o json
 
-Exit code: 0
-
 >>> $CLI bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
diff --git a/acceptance/bundle/paths/nominal/output.txt b/acceptance/bundle/paths/nominal/output.txt
index 189170335..1badcdec6 100644
--- a/acceptance/bundle/paths/nominal/output.txt
+++ b/acceptance/bundle/paths/nominal/output.txt
@@ -1,8 +1,6 @@
 
 >>> $CLI bundle validate -t development -o json
 
-Exit code: 0
-
 >>> $CLI bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
diff --git a/acceptance/bundle/variables/arg-repeat/output.txt b/acceptance/bundle/variables/arg-repeat/output.txt
index 48bd2033f..2f9de1a3c 100644
--- a/acceptance/bundle/variables/arg-repeat/output.txt
+++ b/acceptance/bundle/variables/arg-repeat/output.txt
@@ -1,7 +1,5 @@
 
 >>> errcode $CLI bundle validate --var a=one -o json
-
-Exit code: 0
 {
   "a": {
     "default": "hello",
diff --git a/acceptance/script.prepare b/acceptance/script.prepare
index 0567e433a..87910654d 100644
--- a/acceptance/script.prepare
+++ b/acceptance/script.prepare
@@ -6,7 +6,9 @@ errcode() {
     local exit_code=$?
     # Re-enable 'set -e' if it was previously set
     set -e
-    >&2 printf "\nExit code: $exit_code\n"
+    if [ $exit_code -ne 0 ]; then
+        >&2 printf "\nExit code: $exit_code\n"
+    fi
 }
 
 trace() {

From b4ed23510490bcc16e15990c210598341d4657a6 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Fri, 24 Jan 2025 11:18:44 +0100
Subject: [PATCH 14/39] Include EvalSymlinks in SetPath and use SetPath on all
 paths (#2219)

## Changes
When adding a path, a few things need to be taken care of:
- symlink expansion
- forward/backward slashes, so that tests can do sed 's/\\\\/\//g' to
make the output pass on Windows (see
acceptance/bundle/syncroot/dotdot-git/script)

The SetPath() function takes care of both (see the sketch below).

This PR uses SetPath() on all paths consistently.

## Tests
Existing tests.
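
For illustration, a minimal, self-contained sketch of the rule above (not the actual libs/testdiff API): on macOS /tmp resolves under /private, and Windows short names like C:\Users\DENIS~1.BIL expand to the long form, so either spelling of a path can appear in output and both need a replacement entry.

```go
package main

import (
	"fmt"
	"path/filepath"
)

// setPath registers a replacement for both the raw path and its
// symlink-expanded form, mirroring the idea behind SetPath above.
// The plain map stands in for the real ReplacementsContext.
func setPath(repls map[string]string, old, new string) {
	if old != "" && old != "." {
		if evalled, err := filepath.EvalSymlinks(old); err == nil && evalled != old {
			repls[evalled] = new // e.g. /private/tmp/... -> $TMPDIR on macOS
		}
	}
	repls[old] = new // the spelling the test itself used
}

func main() {
	repls := map[string]string{}
	setPath(repls, "/tmp", "$TMPDIR")
	fmt.Println(repls)
}
```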
---
 acceptance/acceptance_test.go |  9 ++-------
 libs/testdiff/replacement.go  | 16 ++++++++++++++--
 2 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 96c1f651c..0e7877dcf 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -93,13 +93,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	}
 
 	t.Setenv("CLI", execPath)
-	repls.Set(execPath, "$CLI")
+	repls.SetPath(execPath, "$CLI")
 
 	// Make helper scripts available
 	t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
 
 	tempHomeDir := t.TempDir()
-	repls.Set(tempHomeDir, "$TMPHOME")
+	repls.SetPath(tempHomeDir, "$TMPHOME")
 	t.Logf("$TMPHOME=%v", tempHomeDir)
 
 	// Prevent CLI from downloading terraform in each test:
@@ -187,11 +187,6 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		tmpDir = t.TempDir()
 	}
 
-	// Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko
-	tmpDirEvalled, err1 := filepath.EvalSymlinks(tmpDir)
-	if err1 == nil && tmpDirEvalled != tmpDir {
-		repls.SetPathWithParents(tmpDirEvalled, "$TMPDIR")
-	}
 	repls.SetPathWithParents(tmpDir, "$TMPDIR")
 
 	scriptContents := readMergedScriptContents(t, dir)
diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go
index ca76b159c..865192662 100644
--- a/libs/testdiff/replacement.go
+++ b/libs/testdiff/replacement.go
@@ -94,6 +94,18 @@ func trimQuotes(s string) string {
 }
 
 func (r *ReplacementsContext) SetPath(old, new string) {
+	if old != "" && old != "." {
+		// Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko
+		oldEvalled, err1 := filepath.EvalSymlinks(old)
+		if err1 == nil && oldEvalled != old {
+			r.SetPathNoEval(oldEvalled, new)
+		}
+	}
+
+	r.SetPathNoEval(old, new)
+}
+
+func (r *ReplacementsContext) SetPathNoEval(old, new string) {
 	r.Set(old, new)
 
 	if runtime.GOOS != "windows" {
@@ -133,7 +145,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
 	r.Set(w.Config.Token, "$DATABRICKS_TOKEN")
 	r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
 	r.Set(w.Config.Password, "$DATABRICKS_PASSWORD")
-	r.Set(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
+	r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
 	r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
 	r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
 	r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS")
@@ -147,7 +159,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
 	r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
 	r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
 	r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET")
-	r.Set(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
+	r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
 	// This is set to words like "path" that happen too frequently
 	// r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
 }

From a47a058506d874019887baea1006b587f47cbfdb Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Fri, 24 Jan 2025 16:35:00 +0530
Subject: [PATCH 15/39] Limit test server to only accept GET on read endpoints
 (#2225)

## Changes
Now the test server only matches GET requests on these endpoints.

## Tests
Existing tests.
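
The "GET /path" patterns rely on the method-aware routing added to net/http's ServeMux in Go 1.22. A standalone sketch, assuming the test server forwards its patterns to such a mux, showing that a non-GET request to a registered path is rejected with 405 instead of reaching the handler:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Go 1.22+ ServeMux: "METHOD /path" patterns also match on the HTTP method.
	mux := http.NewServeMux()
	mux.HandleFunc("GET /api/2.0/preview/scim/v2/Me", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, `{"userName": "tester@databricks.com"}`)
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	get, _ := http.Get(srv.URL + "/api/2.0/preview/scim/v2/Me")
	post, _ := http.Post(srv.URL+"/api/2.0/preview/scim/v2/Me", "application/json", nil)
	fmt.Println(get.StatusCode, post.StatusCode) // 200 405
}
```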
---
 acceptance/server_test.go | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/acceptance/server_test.go b/acceptance/server_test.go
index 0d10fbea1..eb8cbb24a 100644
--- a/acceptance/server_test.go
+++ b/acceptance/server_test.go
@@ -68,7 +68,7 @@ func StartServer(t *testing.T) *TestServer {
 }
 
 func AddHandlers(server *TestServer) {
-	server.Handle("/api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
 		return compute.ListPoliciesResponse{
 			Policies: []compute.Policy{
 				{
@@ -83,7 +83,7 @@ func AddHandlers(server *TestServer) {
 		}, nil
 	})
 
-	server.Handle("/api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
 		return compute.ListInstancePools{
 			InstancePools: []compute.InstancePoolAndStats{
 				{
@@ -94,7 +94,7 @@ func AddHandlers(server *TestServer) {
 		}, nil
 	})
 
-	server.Handle("/api/2.1/clusters/list", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) {
 		return compute.ListClustersResponse{
 			Clusters: []compute.ClusterDetails{
 				{
@@ -109,13 +109,13 @@ func AddHandlers(server *TestServer) {
 		}, nil
 	})
 
-	server.Handle("/api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
 		return iam.User{
 			UserName: "tester@databricks.com",
 		}, nil
 	})
 
-	server.Handle("/api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
 		return workspace.ObjectInfo{
 			ObjectId:   1001,
 			ObjectType: "DIRECTORY",
@@ -124,13 +124,13 @@ func AddHandlers(server *TestServer) {
 		}, nil
 	})
 
-	server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
 		return catalog.MetastoreAssignment{
 			DefaultCatalogName: "main",
 		}, nil
 	})
 
-	server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
+	server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
 		return workspace.WorkspaceObjectPermissions{
 			ObjectId:   "1001",
 			ObjectType: "DIRECTORY",

From 959e43e556b2fc775feaf5d519000afdad17a815 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Fri, 24 Jan 2025 15:28:23 +0100
Subject: [PATCH 16/39] acc: Support per-test configuration; GOOS option to
 disable OS (#2227)

## Changes
- Acceptance tests load test.toml to configure test behaviour.
- If the file is not found in the test directory, parent directories are
searched up to the test root (see the sketch below).
- Currently there is one option: runtime.GOOS, to switch off tests per
OS.

## Tests
Using it in https://github.com/databricks/cli/pull/2223 to disable a test
on Windows that cannot run there.
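
A simplified, standalone Go sketch of that lookup rule (not the harness code itself; for brevity it stops at the filesystem root rather than the acceptance root): walk upwards from the test directory and take the closest test.toml, without merging.

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// findConfig walks from dir towards the root and returns the first
// test.toml it finds; the closest file wins and configs are not merged.
func findConfig(dir string) (string, bool) {
	for {
		path := filepath.Join(dir, "test.toml")
		if _, err := os.Stat(path); err == nil {
			return path, true
		}
		parent := filepath.Dir(dir)
		if parent == dir { // reached the root without finding a config
			return "", false
		}
		dir = parent
	}
}

func main() {
	path, ok := findConfig("acceptance/bundle/git-permerror")
	fmt.Println(path, ok)
}
```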
---
 NOTICE                        |  4 ++
 acceptance/acceptance_test.go |  7 +++
 acceptance/config_test.go     | 99 +++++++++++++++++++++++++++++++++++
 acceptance/test.toml          |  2 +
 go.mod                        |  1 +
 go.sum                        |  2 +
 6 files changed, 115 insertions(+)
 create mode 100644 acceptance/config_test.go
 create mode 100644 acceptance/test.toml

diff --git a/NOTICE b/NOTICE
index f6b59e0b0..ed22084cf 100644
--- a/NOTICE
+++ b/NOTICE
@@ -105,3 +105,7 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
 https://github.com/hexops/gotextdiff
 Copyright (c) 2009 The Go Authors. All rights reserved.
 License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
+
+https://github.com/BurntSushi/toml
+Copyright (c) 2013 TOML authors
+https://github.com/BurntSushi/toml/blob/master/COPYING
diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 0e7877dcf..a1c41c5e6 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -175,6 +175,13 @@ func getTests(t *testing.T) []string {
 }
 
 func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
+	config, configPath := LoadConfig(t, dir)
+
+	isEnabled, isPresent := config.GOOS[runtime.GOOS]
+	if isPresent && !isEnabled {
+		t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
+	}
+
 	var tmpDir string
 	var err error
 	if KeepTmp {
diff --git a/acceptance/config_test.go b/acceptance/config_test.go
new file mode 100644
index 000000000..49dce06ba
--- /dev/null
+++ b/acceptance/config_test.go
@@ -0,0 +1,99 @@
+package acceptance_test
+
+import (
+	"os"
+	"path/filepath"
+	"sync"
+	"testing"
+
+	"github.com/BurntSushi/toml"
+	"github.com/stretchr/testify/require"
+)
+
+const configFilename = "test.toml"
+
+var (
+	configCache map[string]TestConfig
+	configMutex sync.Mutex
+)
+
+type TestConfig struct {
+	// Place to describe what's wrong with this test. Does not affect how the test is run.
+	Badness string
+
+	// Which OSes the test is enabled on. Each string is compared against runtime.GOOS.
+	// If absent, default to true.
+	GOOS map[string]bool
+}
+
+// FindConfig finds the closest config file.
+func FindConfig(t *testing.T, dir string) (string, bool) {
+	shared := false
+	for {
+		path := filepath.Join(dir, configFilename)
+		_, err := os.Stat(path)
+
+		if err == nil {
+			return path, shared
+		}
+
+		shared = true
+
+		if dir == "" || dir == "." {
+			break
+		}
+
+		if os.IsNotExist(err) {
+			dir = filepath.Dir(dir)
+			continue
+		}
+
+		t.Fatalf("Error while reading %s: %s", path, err)
+	}
+
+	t.Fatal("Config not found: " + configFilename)
+	return "", shared
+}
+
+// LoadConfig loads the config file. Non-leaf configs are cached.
+func LoadConfig(t *testing.T, dir string) (TestConfig, string) {
+	path, leafConfig := FindConfig(t, dir)
+
+	if leafConfig {
+		return DoLoadConfig(t, path), path
+	}
+
+	configMutex.Lock()
+	defer configMutex.Unlock()
+
+	if configCache == nil {
+		configCache = make(map[string]TestConfig)
+	}
+
+	result, ok := configCache[path]
+	if ok {
+		return result, path
+	}
+
+	result = DoLoadConfig(t, path)
+	configCache[path] = result
+	return result, path
+}
+
+func DoLoadConfig(t *testing.T, path string) TestConfig {
+	bytes, err := os.ReadFile(path)
+	if err != nil {
+		t.Fatalf("failed to read config: %s", err)
+	}
+
+	var config TestConfig
+	meta, err := toml.Decode(string(bytes), &config)
+	require.NoError(t, err)
+
+	keys := meta.Undecoded()
+	if len(keys) > 0 {
+		t.Fatalf("Undecoded keys in %s: %#v", path, keys)
+	}
+
+	return config
+}
diff --git a/acceptance/test.toml b/acceptance/test.toml
new file mode 100644
index 000000000..eee94d0ea
--- /dev/null
+++ b/acceptance/test.toml
@@ -0,0 +1,2 @@
+# If neither the test directory nor any of its parents has a test.toml, this file serves as the fallback configuration.
+# The configurations are not merged across parents; the closest one is used fully.
diff --git a/go.mod b/go.mod
index 0ef800d7b..930963f89 100644
--- a/go.mod
+++ b/go.mod
@@ -5,6 +5,7 @@ go 1.23
 toolchain go1.23.4
 
 require (
+	github.com/BurntSushi/toml v1.4.0 // MIT
 	github.com/Masterminds/semver/v3 v3.3.1 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
 	github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0
diff --git a/go.sum b/go.sum
index b1364cb26..d025b3947 100644
--- a/go.sum
+++ b/go.sum
@@ -8,6 +8,8 @@ cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1h
 dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
 dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
+github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
 github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4=
 github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
 github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=

From f65508690d92301e0f6e27ce76a46d28780272ea Mon Sep 17 00:00:00 2001
From: Pieter Noordhuis <pieter.noordhuis@databricks.com>
Date: Fri, 24 Jan 2025 16:33:54 +0100
Subject: [PATCH 17/39] Update publish-winget action to use Komac directly
 (#2228)

## Changes

For the most recent release, I had to re-run the "publish-winget" action
a couple of times before it passed. The underlying issue that causes the
failure should be solved by the latest version of the action, but upon
inspection of the latest version, I found that it always installs the
latest version of [Komac](https://github.com/russellbanks/Komac). To
both fix the issue and lock this down further, I updated our action to
call Komac directly instead of relying on a separate action to do this
for us.

## Tests

Successful run in
https://github.com/databricks/cli/actions/runs/12951529979.
---
 .github/workflows/publish-winget.yml | 68 +++++++++++++++++++++++-----
 1 file changed, 57 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/publish-winget.yml b/.github/workflows/publish-winget.yml
index eb9a72eda..cbd24856b 100644
--- a/.github/workflows/publish-winget.yml
+++ b/.github/workflows/publish-winget.yml
@@ -10,19 +10,65 @@ on:
 jobs:
   publish-to-winget-pkgs:
     runs-on:
-      group: databricks-protected-runner-group
-      labels: windows-server-latest
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
 
     environment: release
 
     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
-        with:
-          identifier: Databricks.DatabricksCLI
-          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
-          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
-          fork-user: eng-dev-ecosystem-bot
+      - name: Checkout repository and submodules
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
-          # Use the tag from the input, or the ref name if the input is not provided.
-          # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
-          release-tag: ${{ inputs.tag || github.ref_name }}
+        # When updating the version of komac, make sure to update the checksum in the next step.
+        # Find both at https://github.com/russellbanks/Komac/releases.
+      - name: Download komac binary
+        run: |
+          curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
+
+      - name: Verify komac binary
+        run: |
+          echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c  $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
+
+      - name: Untar komac binary to temporary path
+        run: |
+          mkdir -p $RUNNER_TEMP/komac
+          tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
+
+      - name: Add komac to PATH
+        run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
+
+      - name: Confirm komac version
+        run: komac --version
+
+        # Use the tag from the input, or the ref name if the input is not provided.
+        # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
+      - name: Strip "v" prefix from version
+        id: strip_version
+        run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
+
+      - name: Get URLs of signed Windows binaries
+        id: get_windows_urls
+        run: |
+          urls=$(
+            gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
+            jq -r .assets[].browser_download_url | \
+            grep -E '_windows_.*-signed\.zip$' | \
+            tr '\n' ' '
+          )
+          if [ -z "$urls" ]; then
+            echo "No signed Windows binaries found" >&2
+            exit 1
+          fi
+          echo "urls=$urls" >> "$GITHUB_OUTPUT"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to Winget
+        run: |
+          komac update Databricks.DatabricksCLI \
+            --version ${{ steps.strip_version.outputs.version }} \
+            --submit \
+            --urls ${{ steps.get_windows_urls.outputs.urls }} \
+        env:
+          KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
+          GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}

From 468660dc45bd1deac4d37fb914d4a6224aa1a27e Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Fri, 24 Jan 2025 16:53:06 +0100
Subject: [PATCH 18/39] Add an acc test covering failures when reading .git
 (#2223)

## Changes
- New test covering failures when reading .git. One case results in an error,
others result in a warning (not shown).
- New helper withdir runs commands in a subdirectory.

## Tests
New acceptance test.
---
 .../bundle/git-permerror/databricks.yml       |  2 +
 acceptance/bundle/git-permerror/output.txt    | 78 +++++++++++++++++++
 acceptance/bundle/git-permerror/script        | 25 ++++++
 acceptance/bundle/git-permerror/test.toml     |  5 ++
 acceptance/script.prepare                     | 11 +++
 5 files changed, 121 insertions(+)
 create mode 100644 acceptance/bundle/git-permerror/databricks.yml
 create mode 100644 acceptance/bundle/git-permerror/output.txt
 create mode 100644 acceptance/bundle/git-permerror/script
 create mode 100644 acceptance/bundle/git-permerror/test.toml

diff --git a/acceptance/bundle/git-permerror/databricks.yml b/acceptance/bundle/git-permerror/databricks.yml
new file mode 100644
index 000000000..83e0acda8
--- /dev/null
+++ b/acceptance/bundle/git-permerror/databricks.yml
@@ -0,0 +1,2 @@
+bundle:
+  name: git-permerror
diff --git a/acceptance/bundle/git-permerror/output.txt b/acceptance/bundle/git-permerror/output.txt
new file mode 100644
index 000000000..2b52134ab
--- /dev/null
+++ b/acceptance/bundle/git-permerror/output.txt
@@ -0,0 +1,78 @@
+=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git
+
+>>> $CLI bundle validate
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+Name: git-permerror
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default
+
+Found 1 error
+
+Exit code: 1
+
+>>> $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
+
+
+=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git/HEAD
+
+>>> $CLI bundle validate -o json
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
+
+
+=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git/config
+
+>>> $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": ".",
+  "inferred": true
+}
diff --git a/acceptance/bundle/git-permerror/script b/acceptance/bundle/git-permerror/script
new file mode 100644
index 000000000..782cbf5bc
--- /dev/null
+++ b/acceptance/bundle/git-permerror/script
@@ -0,0 +1,25 @@
+mkdir myrepo
+cd myrepo
+cp ../databricks.yml .
+git-repo-init
+mkdir -p subdir/a/b
+
+printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
+trace chmod 000 .git
+errcode trace $CLI bundle validate
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
+chmod 700 .git
+trace chmod 000 .git/HEAD
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorretly set to true. bundle_root_path is not correct is subdir case.\n"
+chmod 666 .git/HEAD
+trace chmod 000 .git/config
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+rm -fr .git
diff --git a/acceptance/bundle/git-permerror/test.toml b/acceptance/bundle/git-permerror/test.toml
new file mode 100644
index 000000000..3f96e551c
--- /dev/null
+++ b/acceptance/bundle/git-permerror/test.toml
@@ -0,0 +1,5 @@
+Badness = "Warning logs not shown; inferred flag is set to true incorrect; bundle_root_path is not correct"
+
+[GOOS]
+# This test relies on chmod which does not work on Windows
+windows = false
diff --git a/acceptance/script.prepare b/acceptance/script.prepare
index 87910654d..b814a1260 100644
--- a/acceptance/script.prepare
+++ b/acceptance/script.prepare
@@ -47,3 +47,14 @@ title() {
     local label="$1"
     printf "\n=== %s" "$label"
 }
+
+withdir() {
+    local dir="$1"
+    shift
+    local orig_dir="$(pwd)"
+    cd "$dir" || return $?
+    "$@"
+    local exit_code=$?
+    cd "$orig_dir" || return $?
+    return $exit_code
+}

From b3d98fe66664cb85c750364afce9b1ea0785417f Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 08:45:09 +0100
Subject: [PATCH 19/39] acc: Print replacements on error and rm duplicates
 (#2230)

## Changes
- When file comparison fails in an acceptance test, print the contents of
all applied replacements. Do it once per test.
- Remove duplicate entries in the replacement list.

## Tests
Manually: change the out files of an existing test, and you'll get this
printed once, after the first assertion:

```
        acceptance_test.go:307: Available replacements:
            REPL /Users/denis\.bilenko/work/cli/acceptance/build/databricks => $$CLI
            REPL /private/var/folders/5y/9kkdnjw91p11vsqwk0cvmk200000gp/T/TestAccept598522733/001 => $$TMPHOME
            ...
```
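
On the deduplication part: a small sketch (not the real ReplacementsContext code) of why the JSON-encoded variant of a replacement is only worth adding when it differs from the raw string; POSIX paths encode to themselves, while Windows paths gain escaped backslashes.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// jsonForm returns how a string appears inside JSON output, without the
// surrounding quotes.
func jsonForm(s string) string {
	b, _ := json.Marshal(s)
	return strings.Trim(string(b), `"`)
}

func main() {
	fmt.Println(jsonForm(`C:\Users\tester`)) // C:\\Users\\tester: differs, keep the extra entry
	fmt.Println(jsonForm("/tmp/TestAccept")) // /tmp/TestAccept: identical, skip the duplicate
}
```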
---
 acceptance/acceptance_test.go | 17 ++++++++++++++---
 libs/testdiff/replacement.go  |  6 +++++-
 2 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index a1c41c5e6..11fd3f2ee 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -228,9 +228,11 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 	formatOutput(out, err)
 	require.NoError(t, out.Close())
 
+	printedRepls := false
+
 	// Compare expected outputs
 	for relPath := range outputs {
-		doComparison(t, repls, dir, tmpDir, relPath)
+		doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 	}
 
 	// Make sure there are not unaccounted for new files
@@ -245,12 +247,12 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
 			// Show the contents & support overwrite mode for it:
-			doComparison(t, repls, dir, tmpDir, relPath)
+			doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 		}
 	}
 }
 
-func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string) {
+func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
 	pathRef := filepath.Join(dirRef, relPath)
 	pathNew := filepath.Join(dirNew, relPath)
 	bufRef, okRef := readIfExists(t, pathRef)
@@ -295,6 +297,15 @@ func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirN
 		t.Logf("Overwriting existing output file: %s", relPath)
 		testutil.WriteFile(t, pathRef, valueNew)
 	}
+
+	if !equal && printedRepls != nil && !*printedRepls {
+		*printedRepls = true
+		var items []string
+		for _, item := range repls.Repls {
+			items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
+		}
+		t.Log("Available replacements:\n" + strings.Join(items, "\n"))
+	}
 }
 
 // Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go
index 865192662..b512374a3 100644
--- a/libs/testdiff/replacement.go
+++ b/libs/testdiff/replacement.go
@@ -76,7 +76,11 @@ func (r *ReplacementsContext) Set(old, new string) {
 	if err == nil {
 		encodedOld, err := json.Marshal(old)
 		if err == nil {
-			r.appendLiteral(trimQuotes(string(encodedOld)), trimQuotes(string(encodedNew)))
+			encodedStrNew := trimQuotes(string(encodedNew))
+			encodedStrOld := trimQuotes(string(encodedOld))
+			if encodedStrNew != new || encodedStrOld != old {
+				r.appendLiteral(encodedStrOld, encodedStrNew)
+			}
 		}
 	}
 

From 82b0dd36d682b1b11260e05e8a5c6aeccb65c255 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 09:17:22 +0100
Subject: [PATCH 20/39] Add acceptance/selftest, showcasing basic features
 (#2229)

Also make TestInprocessMode use this test.
---
 acceptance/README.md              |  2 ++
 acceptance/acceptance_test.go     |  7 +------
 acceptance/selftest/out.hello.txt |  1 +
 acceptance/selftest/output.txt    | 30 ++++++++++++++++++++++++++++++
 acceptance/selftest/script        | 21 +++++++++++++++++++++
 acceptance/selftest/test.toml     | 11 +++++++++++
 6 files changed, 66 insertions(+), 6 deletions(-)
 create mode 100644 acceptance/selftest/out.hello.txt
 create mode 100644 acceptance/selftest/output.txt
 create mode 100644 acceptance/selftest/script
 create mode 100644 acceptance/selftest/test.toml

diff --git a/acceptance/README.md b/acceptance/README.md
index 42a37d253..75ac1d5fc 100644
--- a/acceptance/README.md
+++ b/acceptance/README.md
@@ -17,3 +17,5 @@ For more complex tests one can also use:
 - `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
+
+See [selftest](./selftest) for a toy test.
diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 11fd3f2ee..6b70c6a7f 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -60,12 +60,7 @@ func TestInprocessMode(t *testing.T) {
 	if InprocessMode {
 		t.Skip("Already tested by TestAccept")
 	}
-	if runtime.GOOS == "windows" {
-		// -  catalogs                               A catalog is the first layer of Unity Catalog’s three-level namespace.
-		// +  catalogs                               A catalog is the first layer of Unity Catalog�s three-level namespace.
-		t.Skip("Fails on CI on unicode characters")
-	}
-	require.NotZero(t, testAccept(t, true, "help"))
+	require.Equal(t, 1, testAccept(t, true, "selftest"))
 }
 
 func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
diff --git a/acceptance/selftest/out.hello.txt b/acceptance/selftest/out.hello.txt
new file mode 100644
index 000000000..e427984d4
--- /dev/null
+++ b/acceptance/selftest/out.hello.txt
@@ -0,0 +1 @@
+HELLO
diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt
new file mode 100644
index 000000000..d1830e01f
--- /dev/null
+++ b/acceptance/selftest/output.txt
@@ -0,0 +1,30 @@
+=== Capturing STDERR
+>>> python3 -c import sys; sys.stderr.write("STDERR\n")
+STDERR
+
+=== Capturing STDOUT
+>>> python3 -c import sys; sys.stderr.write("STDOUT\n")
+STDOUT
+
+=== Capturing exit code
+>>> errcode python3 -c raise SystemExit(5)
+
+Exit code: 5
+
+=== Capturing exit code (alt)
+>>> python3 -c raise SystemExit(7)
+
+Exit code: 7
+
+=== Capturing pwd
+>>> python3 -c import os; print(os.getcwd())
+$TMPDIR
+
+=== Capturing subdir
+>>> mkdir -p subdir/a/b/c
+
+>>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd())
+$TMPDIR/subdir/a/b/c
+
+=== Custom output files - everything starting with out is captured and compared
+>>> echo HELLO
diff --git a/acceptance/selftest/script b/acceptance/selftest/script
new file mode 100644
index 000000000..89201d925
--- /dev/null
+++ b/acceptance/selftest/script
@@ -0,0 +1,21 @@
+printf "=== Capturing STDERR"
+trace python3 -c 'import sys; sys.stderr.write("STDERR\n")'
+
+printf "\n=== Capturing STDOUT"
+trace python3 -c 'import sys; sys.stderr.write("STDOUT\n")'
+
+printf "\n=== Capturing exit code"
+trace errcode python3 -c 'raise SystemExit(5)'
+
+printf "\n=== Capturing exit code (alt)"
+errcode trace python3 -c 'raise SystemExit(7)'
+
+printf "\n=== Capturing pwd"
+trace python3 -c 'import os; print(os.getcwd())'
+
+printf "\n=== Capturing subdir"
+trace mkdir -p subdir/a/b/c
+trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())' | sed 's/\\/\//g'
+
+printf "\n=== Custom output files - everything starting with out is captured and compared"
+trace echo HELLO > out.hello.txt
diff --git a/acceptance/selftest/test.toml b/acceptance/selftest/test.toml
new file mode 100644
index 000000000..d867a4fd7
--- /dev/null
+++ b/acceptance/selftest/test.toml
@@ -0,0 +1,11 @@
+# Badness = "Brief description of what's wrong with the test output, if anything"
+
+#[GOOS]
+# Disable on Windows
+#windows = false
+
+# Disable on Mac
+#mac = false
+
+# Disable on Linux
+#linux = false

From 1cb32eca907872556b94890e3666ffac531a0f29 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 10:11:06 +0100
Subject: [PATCH 21/39] acc: Support custom replacements (#2231)

## Changes
- Ability to extend the list of replacements via test.toml.
- Modify selftest to both demo this feature and get rid of sed on
Windows.

## Tests
Acceptance tests. I'm also using it in
https://github.com/databricks/cli/pull/2213 for things like pid.
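
For reference, a standalone sketch of what a [[Repls]] entry amounts to at comparison time, assuming Old is treated as a Go regexp and New as the replacement expression (as documented in config_test.go below):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Mirrors the selftest entry: Old = '\b[0-9]{5}\b', New = "CUSTOM_NUMBER_REGEX".
	old := regexp.MustCompile(`\b[0-9]{5}\b`)
	out := "1234\n12345\n123456\n"
	fmt.Print(old.ReplaceAllString(out, "CUSTOM_NUMBER_REGEX"))
	// 1234
	// CUSTOM_NUMBER_REGEX
	// 123456
}
```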
---
 acceptance/acceptance_test.go  | 1 +
 acceptance/config_test.go      | 5 +++++
 acceptance/selftest/output.txt | 5 +++++
 acceptance/selftest/script     | 7 ++++++-
 acceptance/selftest/test.toml  | 9 +++++++++
 5 files changed, 26 insertions(+), 1 deletion(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 6b70c6a7f..e48bd9908 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -190,6 +190,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 	}
 
 	repls.SetPathWithParents(tmpDir, "$TMPDIR")
+	repls.Repls = append(repls.Repls, config.Repls...)
 
 	scriptContents := readMergedScriptContents(t, dir)
 	testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
diff --git a/acceptance/config_test.go b/acceptance/config_test.go
index 49dce06ba..41866c4a7 100644
--- a/acceptance/config_test.go
+++ b/acceptance/config_test.go
@@ -7,6 +7,7 @@ import (
 	"testing"
 
 	"github.com/BurntSushi/toml"
+	"github.com/databricks/cli/libs/testdiff"
 	"github.com/stretchr/testify/require"
 )
 
@@ -24,6 +25,10 @@ type TestConfig struct {
 	// Which OSes the test is enabled on. Each string is compared against runtime.GOOS.
 	// If absent, default to true.
 	GOOS map[string]bool
+
+	// List of additional replacements to apply on this test.
+	// Old is a regexp, New is a replacement expression.
+	Repls []testdiff.Replacement
 }
 
 // FindConfig finds the closest config file.
diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt
index d1830e01f..9fdfbc1e7 100644
--- a/acceptance/selftest/output.txt
+++ b/acceptance/selftest/output.txt
@@ -28,3 +28,8 @@ $TMPDIR/subdir/a/b/c
 
 === Custom output files - everything starting with out is captured and compared
 >>> echo HELLO
+
+=== Custom regex can be specified in [[Repls]] section
+1234
+CUSTOM_NUMBER_REGEX
+123456
diff --git a/acceptance/selftest/script b/acceptance/selftest/script
index 89201d925..665726167 100644
--- a/acceptance/selftest/script
+++ b/acceptance/selftest/script
@@ -15,7 +15,12 @@ trace python3 -c 'import os; print(os.getcwd())'
 
 printf "\n=== Capturing subdir"
 trace mkdir -p subdir/a/b/c
-trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())' | sed 's/\\/\//g'
+trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())'
 
 printf "\n=== Custom output files - everything starting with out is captured and compared"
 trace echo HELLO > out.hello.txt
+
+printf "\n=== Custom regex can be specified in [[Repl]] section\n"
+echo 1234
+echo 12345
+echo 123456
diff --git a/acceptance/selftest/test.toml b/acceptance/selftest/test.toml
index d867a4fd7..9607ec5df 100644
--- a/acceptance/selftest/test.toml
+++ b/acceptance/selftest/test.toml
@@ -9,3 +9,12 @@
 
 # Disable on Linux
 #linux = false
+
+[[Repls]]
+Old = '\b[0-9]{5}\b'
+New = "CUSTOM_NUMBER_REGEX"
+
+[[Repls]]
+# Fix path with reverse slashes in the output for Windows.
+Old = '\$TMPDIR\\subdir\\a\\b\\c'
+New = '$$TMPDIR/subdir/a/b/c'

From 6e8f0ea8afeecf86c3edd42d0ccccbacf25353d2 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 11:33:16 +0100
Subject: [PATCH 22/39] CI: Move ruff to 'lint' job (#2232)

This is where it belongs, and there is no need to run it three times.
---
 .github/workflows/push.yml | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index c3a314d69..2a8a68862 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -60,12 +60,6 @@ jobs:
       - name: Install uv
         uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0
 
-      - name: Run ruff
-        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
-        with:
-          version: "0.9.1"
-          args: "format --check"
-
       - name: Set go env
         run: |
           echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV
@@ -80,7 +74,7 @@ jobs:
       - name: Run tests with coverage
         run: make cover
 
-  golangci:
+  linters:
     needs: cleanups
     name: lint
     runs-on: ubuntu-latest
@@ -105,6 +99,11 @@ jobs:
         with:
           version: v1.63.4
           args: --timeout=15m
+      - name: Run ruff
+        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
+        with:
+          version: "0.9.1"
+          args: "format --check"
 
   validate-bundle-schema:
     needs: cleanups

From b7dd70b8b3c59d64ab7b54805750b532b0d75f07 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 13:22:40 +0100
Subject: [PATCH 23/39] acc: Add a couple of error tests for 'bundle init'
 (#2233)

This captures how we log errors related to subprocess runs and what the
output looks like.
---
 acceptance/bundle/templates/wrong-path/output.txt | 3 +++
 acceptance/bundle/templates/wrong-path/script     | 2 ++
 acceptance/bundle/templates/wrong-path/test.toml  | 1 +
 acceptance/bundle/templates/wrong-url/output.txt  | 5 +++++
 acceptance/bundle/templates/wrong-url/script      | 2 ++
 acceptance/bundle/templates/wrong-url/test.toml   | 7 +++++++
 6 files changed, 20 insertions(+)
 create mode 100644 acceptance/bundle/templates/wrong-path/output.txt
 create mode 100644 acceptance/bundle/templates/wrong-path/script
 create mode 100644 acceptance/bundle/templates/wrong-path/test.toml
 create mode 100644 acceptance/bundle/templates/wrong-url/output.txt
 create mode 100644 acceptance/bundle/templates/wrong-url/script
 create mode 100644 acceptance/bundle/templates/wrong-url/test.toml

diff --git a/acceptance/bundle/templates/wrong-path/output.txt b/acceptance/bundle/templates/wrong-path/output.txt
new file mode 100644
index 000000000..0a6fdfc84
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-path/output.txt
@@ -0,0 +1,3 @@
+Error: not a bundle template: expected to find a template schema file at databricks_template_schema.json
+
+Exit code: 1
diff --git a/acceptance/bundle/templates/wrong-path/script b/acceptance/bundle/templates/wrong-path/script
new file mode 100644
index 000000000..00c05927a
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-path/script
@@ -0,0 +1,2 @@
+export NO_COLOR=1
+$CLI bundle init /DOES/NOT/EXIST
diff --git a/acceptance/bundle/templates/wrong-path/test.toml b/acceptance/bundle/templates/wrong-path/test.toml
new file mode 100644
index 000000000..4bbcb5100
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-path/test.toml
@@ -0,0 +1 @@
+Badness = 'The error message should include full path: "expected to find a template schema file at databricks_template_schema.json"'
diff --git a/acceptance/bundle/templates/wrong-url/output.txt b/acceptance/bundle/templates/wrong-url/output.txt
new file mode 100644
index 000000000..b78cf4b68
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-url/output.txt
@@ -0,0 +1,5 @@
+Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'...
+fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com
+
+
+Exit code: 1
diff --git a/acceptance/bundle/templates/wrong-url/script b/acceptance/bundle/templates/wrong-url/script
new file mode 100644
index 000000000..e9bc0f4f6
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-url/script
@@ -0,0 +1,2 @@
+export NO_COLOR=1
+$CLI bundle init https://invalid-domain-123.databricks.com/hello/world
diff --git a/acceptance/bundle/templates/wrong-url/test.toml b/acceptance/bundle/templates/wrong-url/test.toml
new file mode 100644
index 000000000..0bb24bf1a
--- /dev/null
+++ b/acceptance/bundle/templates/wrong-url/test.toml
@@ -0,0 +1,7 @@
+[[Repls]]
+Old = '\\'
+New = '/'
+
+[[Repls]]
+Old = '/world-[0-9]+'
+New = '/world-123456'

From 4595c6f1b5d4890b6c9a1e13257319d52954dfe5 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 27 Jan 2025 14:11:07 +0100
Subject: [PATCH 24/39] Bump github.com/databricks/databricks-sdk-go from
 0.55.0 to 0.56.1 (#2238)

Bumps
[github.com/databricks/databricks-sdk-go](https://github.com/databricks/databricks-sdk-go)
from 0.55.0 to 0.56.1.
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/databricks/databricks-sdk-go/releases">github.com/databricks/databricks-sdk-go's
releases</a>.</em></p>
<blockquote>
<h2>v0.56.1</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Do not send query parameters when set to zero value (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1136">#1136</a>).</li>
</ul>
<h2>v0.56.0</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Support Query parameters for all HTTP operations (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1124">#1124</a>).</li>
</ul>
<h3>Internal Changes</h3>
<ul>
<li>Add download target to MakeFile (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1125">#1125</a>).</li>
<li>Delete examples/mocking module (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1126">#1126</a>).</li>
<li>Scope the traversing directory in the Recursive list workspace test
(<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1120">#1120</a>).</li>
</ul>
<h3>API Changes:</h3>
<ul>
<li>Added <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/iam#AccessControlAPI">w.AccessControl</a>
workspace-level service.</li>
<li>Added <code>HttpRequest</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service.</li>
<li>Added <code>ReviewState</code>, <code>Reviews</code> and
<code>RunnerCollaborators</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetNotebook">cleanrooms.CleanRoomAssetNotebook</a>.</li>
<li>Added <code>CleanRoomsNotebookOutput</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#RunOutput">jobs.RunOutput</a>.</li>
<li>Added <code>RunAsRepl</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#SparkJarTask">jobs.SparkJarTask</a>.</li>
<li>Added <code>Scopes</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/oauth2#UpdateCustomAppIntegration">oauth2.UpdateCustomAppIntegration</a>.</li>
<li>Added <code>Contents</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GetOpenApiResponse">serving.GetOpenApiResponse</a>.</li>
<li>Added <code>Activated</code>, <code>ActivationUrl</code>,
<code>AuthenticationType</code>, <code>Cloud</code>,
<code>Comment</code>, <code>CreatedAt</code>, <code>CreatedBy</code>,
<code>DataRecipientGlobalMetastoreId</code>, <code>IpAccessList</code>,
<code>MetastoreId</code>, <code>Name</code>, <code>Owner</code>,
<code>PropertiesKvpairs</code>, <code>Region</code>,
<code>SharingCode</code>, <code>Tokens</code>, <code>UpdatedAt</code>
and <code>UpdatedBy</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Added <code>ExpirationTime</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Added <code>Pending</code> enum value for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetStatusEnum">cleanrooms.CleanRoomAssetStatusEnum</a>.</li>
<li>Added <code>AddNodesFailed</code>,
<code>AutomaticClusterUpdate</code>, <code>AutoscalingBackoff</code> and
<code>AutoscalingFailed</code> enum values for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/compute#EventType">compute.EventType</a>.</li>
<li>Added <code>PendingWarehouse</code> enum value for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/dashboards#MessageStatus">dashboards.MessageStatus</a>.</li>
<li>Added <code>Cpu</code>, <code>GpuLarge</code>,
<code>GpuMedium</code>, <code>GpuSmall</code> and
<code>MultigpuMedium</code> enum values for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType">serving.ServingModelWorkloadType</a>.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service to return <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service return type to become non-empty.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service to type <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service.</li>
<li>Changed <code>Create</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service with new required argument order.</li>
<li>Changed <code>GetOpenApi</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service return type to become non-empty.</li>
<li>Changed <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service to type <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service.</li>
<li>Changed <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service to return <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags">serving.EndpointTags</a>.</li>
<li>Changed <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTagList">serving.EndpointTagList</a>
to.</li>
<li>Changed <code>CollaboratorAlias</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator">cleanrooms.CleanRoomCollaborator</a>
to be required.</li>
<li>Changed <code>CollaboratorAlias</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator">cleanrooms.CleanRoomCollaborator</a>
to be required.</li>
<li>Changed <code>Behavior</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior">serving.AiGatewayGuardrailPiiBehavior</a>
to no longer be required.</li>
<li>Changed <code>Behavior</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior">serving.AiGatewayGuardrailPiiBehavior</a>
to no longer be required.</li>
<li>Changed <code>Config</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#CreateServingEndpoint">serving.CreateServingEndpoint</a>
to no longer be required.</li>
<li>Changed <code>ProjectId</code> and <code>Region</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig">serving.GoogleCloudVertexAiConfig</a>
to be required.</li>
<li>Changed <code>ProjectId</code> and <code>Region</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GoogleCloudVertexAiConfig">serving.GoogleCloudVertexAiConfig</a>
to be required.</li>
<li>Changed <code>WorkloadType</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityInput">serving.ServedEntityInput</a>
to type <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType">serving.ServingModelWorkloadType</a>.</li>
<li>Changed <code>WorkloadType</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServedEntityOutput">serving.ServedEntityOutput</a>
to type <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType">serving.ServingModelWorkloadType</a>.</li>
</ul>
<!-- raw HTML omitted -->
</blockquote>
<p>... (truncated)</p>
</details>
<details>
<summary>Changelog</summary>
<p><em>Sourced from <a
href="https://github.com/databricks/databricks-sdk-go/blob/main/CHANGELOG.md">github.com/databricks/databricks-sdk-go's
changelog</a>.</em></p>
<blockquote>
<h2>[Release] Release v0.56.1</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Do not send query parameters when set to zero value (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1136">#1136</a>).</li>
</ul>
<h2>[Release] Release v0.56.0</h2>
<h3>Bug Fixes</h3>
<ul>
<li>Support Query parameters for all HTTP operations (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1124">#1124</a>).</li>
</ul>
<h3>Internal Changes</h3>
<ul>
<li>Add download target to MakeFile (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1125">#1125</a>).</li>
<li>Delete examples/mocking module (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1126">#1126</a>).</li>
<li>Scope the traversing directory in the Recursive list workspace test
(<a
href="https://redirect.github.com/databricks/databricks-sdk-go/pull/1120">#1120</a>).</li>
</ul>
<h3>API Changes:</h3>
<ul>
<li>Added <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/iam#AccessControlAPI">w.AccessControl</a>
workspace-level service.</li>
<li>Added <code>HttpRequest</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service.</li>
<li>Added <code>ReviewState</code>, <code>Reviews</code> and
<code>RunnerCollaborators</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetNotebook">cleanrooms.CleanRoomAssetNotebook</a>.</li>
<li>Added <code>CleanRoomsNotebookOutput</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#RunOutput">jobs.RunOutput</a>.</li>
<li>Added <code>RunAsRepl</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/jobs#SparkJarTask">jobs.SparkJarTask</a>.</li>
<li>Added <code>Scopes</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/oauth2#UpdateCustomAppIntegration">oauth2.UpdateCustomAppIntegration</a>.</li>
<li>Added <code>Contents</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#GetOpenApiResponse">serving.GetOpenApiResponse</a>.</li>
<li>Added <code>Activated</code>, <code>ActivationUrl</code>,
<code>AuthenticationType</code>, <code>Cloud</code>,
<code>Comment</code>, <code>CreatedAt</code>, <code>CreatedBy</code>,
<code>DataRecipientGlobalMetastoreId</code>, <code>IpAccessList</code>,
<code>MetastoreId</code>, <code>Name</code>, <code>Owner</code>,
<code>PropertiesKvpairs</code>, <code>Region</code>,
<code>SharingCode</code>, <code>Tokens</code>, <code>UpdatedAt</code>
and <code>UpdatedBy</code> fields for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Added <code>ExpirationTime</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Added <code>Pending</code> enum value for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomAssetStatusEnum">cleanrooms.CleanRoomAssetStatusEnum</a>.</li>
<li>Added <code>AddNodesFailed</code>,
<code>AutomaticClusterUpdate</code>, <code>AutoscalingBackoff</code> and
<code>AutoscalingFailed</code> enum values for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/compute#EventType">compute.EventType</a>.</li>
<li>Added <code>PendingWarehouse</code> enum value for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/dashboards#MessageStatus">dashboards.MessageStatus</a>.</li>
<li>Added <code>Cpu</code>, <code>GpuLarge</code>,
<code>GpuMedium</code>, <code>GpuSmall</code> and
<code>MultigpuMedium</code> enum values for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingModelWorkloadType">serving.ServingModelWorkloadType</a>.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service to return <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientInfo">sharing.RecipientInfo</a>.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service return type to become non-empty.</li>
<li>Changed <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service to type <code>Update</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/sharing#RecipientsAPI">w.Recipients</a>
workspace-level service.</li>
<li>Changed <code>Create</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service with new required argument order.</li>
<li>Changed <code>GetOpenApi</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service return type to become non-empty.</li>
<li>Changed <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service to type <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service.</li>
<li>Changed <code>Patch</code> method for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#ServingEndpointsAPI">w.ServingEndpoints</a>
workspace-level service to return <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTags">serving.EndpointTags</a>.</li>
<li>Changed <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#EndpointTagList">serving.EndpointTagList</a>
to.</li>
<li>Changed <code>CollaboratorAlias</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/cleanrooms#CleanRoomCollaborator">cleanrooms.CleanRoomCollaborator</a>
to be required.</li>
<li>Changed <code>Behavior</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#AiGatewayGuardrailPiiBehavior">serving.AiGatewayGuardrailPiiBehavior</a>
to no longer be required.</li>
<li>Changed <code>Config</code> field for <a
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/serving#CreateServingEndpoint">serving.CreateServingEndpoint</a>
to no longer be required.</li>
</ul>
<!-- raw HTML omitted -->
</blockquote>
<p>... (truncated)</p>
</details>
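
Of the API changes listed above, the one that most affects this repository is visible in the test updates further down this diff: `Config` on `serving.CreateServingEndpoint` is now an optional pointer (`*serving.EndpointCoreConfigInput`) rather than a value. Below is a minimal sketch of constructing an endpoint under the new shape; only fields that appear in the updated tests are used, and the name and model values are illustrative:

```go
package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/serving"
)

func main() {
	// With databricks-sdk-go v0.56.x, Config is a *serving.EndpointCoreConfigInput,
	// so it is taken by address here and can be omitted entirely.
	endpoint := serving.CreateServingEndpoint{
		Name: "name",
		Config: &serving.EndpointCoreConfigInput{
			ServedModels: []serving.ServedModelInput{
				{ModelName: "model_name"},
			},
		},
	}
	fmt.Println(endpoint.Name)
}
```

Code that previously passed `serving.EndpointCoreConfigInput{...}` by value needs to take its address (or drop the field) after this bump, which is what the `convert_test.go` changes below do.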
<details>
<summary>Commits</summary>
<ul>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/bf617bb7a6f46370b94886dd674e4721b17224fd"><code>bf617bb</code></a>
[Release] Release v0.56.1 (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1137">#1137</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/18cebf1d5ca8889ae82f660c96fecc8bc5b73be5"><code>18cebf1</code></a>
[Fix] Do not send query parameters when set to zero value (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1136">#1136</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/28ff749ee2271172ceda01aaaa6e997e8c2aebd7"><code>28ff749</code></a>
[Release] Release v0.56.0 (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1134">#1134</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/113454080f34e4da04782895ea5d61101bf2b425"><code>1134540</code></a>
[Internal] Add download target to MakeFile (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1125">#1125</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/e079db96f33d53d6659b222a905da366dbab576b"><code>e079db9</code></a>
[Fix] Support Query parameters for all HTTP operations (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1124">#1124</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/1045fb9697db505f5fd1ca0ebe4be8b6479df981"><code>1045fb9</code></a>
[Internal] Delete examples/mocking module (<a
href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1126">#1126</a>)</li>
<li><a
href="https://github.com/databricks/databricks-sdk-go/commit/914ab6b7e8e48ca6da6803c10c2d720ba496cd87"><code>914ab6b</code></a>
[Internal] Scope the traversing directory in the Recursive list
workspace tes...</li>
<li>See full diff in <a
href="https://github.com/databricks/databricks-sdk-go/compare/v0.55.0...v0.56.1">compare
view</a></li>
</ul>
</details>
<br />

<details>
<summary>Most Recent Ignore Conditions Applied to This Pull
Request</summary>

| Dependency Name | Ignore Conditions |
| --- | --- |
| github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] |
</details>


[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.55.0&new-version=0.56.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
---
 .codegen/_openapi_sha                         |   2 +-
 .codegen/service.go.tmpl                      |  20 +-
 .gitattributes                                |   1 +
 bundle/deploy/terraform/convert_test.go       |   4 +-
 .../convert_model_serving_endpoint_test.go    |   2 +-
 .../internal/schema/annotations_openapi.yml   | 367 +++++++++++-------
 .../schema/annotations_openapi_overrides.yml  |  11 +
 bundle/schema/jsonschema.json                 | 175 +++++----
 .../custom-app-integration.go                 |   1 +
 cmd/api/api.go                                |   2 +-
 .../access-control/access-control.go          | 109 ++++++
 cmd/workspace/cmd.go                          |   2 +
 cmd/workspace/providers/providers.go          |   4 +-
 cmd/workspace/recipients/recipients.go        |  96 ++---
 .../serving-endpoints/serving-endpoints.go    | 111 +++++-
 go.mod                                        |   2 +-
 go.sum                                        |   4 +-
 integration/cmd/sync/sync_test.go             |   2 +-
 libs/filer/files_client.go                    |   4 +-
 libs/filer/workspace_files_client.go          |   5 +-
 .../workspace_files_extensions_client_test.go |   2 +-
 libs/git/info.go                              |   1 +
 22 files changed, 588 insertions(+), 339 deletions(-)
 create mode 100755 cmd/workspace/access-control/access-control.go

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index dfe78790a..588cf9d63 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-779817ed8d63031f5ea761fbd25ee84f38feec0d
\ No newline at end of file
+0be1b914249781b5e903b7676fd02255755bc851
\ No newline at end of file
diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl
index 0c9fa089a..2f4987b13 100644
--- a/.codegen/service.go.tmpl
+++ b/.codegen/service.go.tmpl
@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
 	{{- end }}
 )
 
+{{- $excludeFromJson := list "http-request"}}
+
 func new{{.PascalName}}() *cobra.Command {
 	cmd := &cobra.Command{}
 
+	{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 	{{- if .Request}}
 
 	var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 	{{- if .RequestBodyField }}
 	{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 	{{- end }}
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	var {{.CamelName}}Json flags.JsonFlag
 	{{- end}}
 	{{- end}}
@@ -135,7 +138,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $request = .RequestBodyField.Entity -}}
 	{{- end -}}
     {{if $request }}// TODO: short flags
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 	{{- end}}
     {{$method := .}}
@@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $hasRequiredArgs :=  and (not $hasIdPrompt) $hasPosArgs -}}
 	{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields))  -}}
 	{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 	{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
 
 	{{- $atleastOneArgumentWithDescription := false -}}
@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
 		ctx := cmd.Context()
 		{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 		{{- if .Request }}
-			{{ if .CanUseJson }}
+			{{ if $canUseJson }}
 			if cmd.Flags().Changed("json") {
 					diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 					if diags.HasError() {
@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
 				return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 			}{{- end}}
 			{{- if $hasPosArgs }}
-			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 			{{- end}}
 			{{- if $hasIdPrompt}}
 				if len(args) == 0 {
@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {
 
 			{{$method := .}}
 			{{- range $arg, $field := .RequiredPositionalArguments}}
-				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
+				{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
 			{{- end -}}
-			{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+			{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 			}
 			{{- end}}
 
@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $method := .Method -}}
 	{{- $arg := .Arg -}}
 	{{- $hasIdPrompt := .HasIdPrompt -}}
-	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
+	{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
+	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
 	{{- if $optionalIfJsonIsUsed  }}
 	if !cmd.Flags().Changed("json") {
 	{{- end }}
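
The template change above gates the `--json` flag behind a new `$canUseJson` helper, so commands listed in `$excludeFromJson` (currently only `http-request`) no longer get the flag even when `CanUseJson` is true. The following standalone sketch reproduces that pattern with stand-in `list` and `in` template functions; the real helpers live in the code generator, so their signatures here are assumptions for illustration only:

```go
package main

import (
	"os"
	"slices"
	"text/template"
)

// Stand-ins for the generator's template helpers (assumed signatures).
var funcs = template.FuncMap{
	"list": func(items ...string) []string { return items },
	"in": func(haystack []string, needle string) bool {
		return slices.Contains(haystack, needle)
	},
}

const cmdTmpl = `
{{- $excludeFromJson := list "http-request" -}}
{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName)) -}}
{{ .KebabName }}: add --json flag = {{ $canUseJson }}
`

func main() {
	t := template.Must(template.New("cmd").Funcs(funcs).Parse(cmdTmpl))
	// "http-request" keeps CanUseJson upstream but is excluded by the new guard.
	for _, cmd := range []map[string]any{
		{"KebabName": "http-request", "CanUseJson": true},
		{"KebabName": "create", "CanUseJson": true},
	} {
		if err := t.Execute(os.Stdout, cmd); err != nil {
			panic(err)
		}
	}
}
```

Running the sketch prints `false` for `http-request` and `true` for `create`, which mirrors how the regenerated commands in this diff decide whether to register the `--json` flag.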
diff --git a/.gitattributes b/.gitattributes
index 0a8ddf3cb..ebe94ed8e 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -31,6 +31,7 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
+cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go
index ffe55db71..afc1fb22a 100644
--- a/bundle/deploy/terraform/convert_test.go
+++ b/bundle/deploy/terraform/convert_test.go
@@ -419,7 +419,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
@@ -474,7 +474,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
 			// and as such observed the `omitempty` tag.
 			// The new method leverages [dyn.Value] where any field that is not
 			// explicitly set is not part of the value.
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
index d46350bb7..98cf2dc22 100644
--- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
+++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go
@@ -17,7 +17,7 @@ func TestConvertModelServingEndpoint(t *testing.T) {
 	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
-			Config: serving.EndpointCoreConfigInput{
+			Config: &serving.EndpointCoreConfigInput{
 				ServedModels: []serving.ServedModelInput{
 					{
 						ModelName:          "model_name",
diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml
index 8ff5c9253..d5a9bf69e 100644
--- a/bundle/internal/schema/annotations_openapi.yml
+++ b/bundle/internal/schema/annotations_openapi.yml
@@ -353,12 +353,12 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel:
 github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint:
   "ai_gateway":
     "description": |-
-      The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.
+      The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.
   "config":
     "description": |-
       The core config of the serving endpoint.
   "name":
-    "description": |
+    "description": |-
       The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.
       An endpoint name can consist of alphanumeric characters, dashes, and underscores.
   "rate_limits":
@@ -1974,6 +1974,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask:
       Parameters passed to the main method.
 
       Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
+  "run_as_repl":
+    "description": |-
+      Deprecated. A value of `false` is no longer supported.
 github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask:
   "parameters":
     "description": |-
@@ -2684,27 +2687,36 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScd
 github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig:
   "ai21labs_api_key":
     "description": |-
-      The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      The Databricks secret key reference for an AI21 Labs API key. If you
+      prefer to paste your API key directly, see `ai21labs_api_key_plaintext`.
+      You must provide an API key using one of the following fields:
+      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
   "ai21labs_api_key_plaintext":
     "description": |-
-      An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
+      An AI21 Labs API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `ai21labs_api_key`. You
+      must provide an API key using one of the following fields:
+      `ai21labs_api_key` or `ai21labs_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig:
   "guardrails":
     "description": |-
       Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
   "inference_table_config":
     "description": |-
-      Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
+      Configuration for payload logging using inference tables.
+      Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.
   "rate_limits":
     "description": |-
       Configuration for rate limits which can be set to limit endpoint traffic.
   "usage_tracking_config":
     "description": |-
-      Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.
+      Configuration to enable usage tracking using system tables.
+      These tables allow you to monitor operational usage on endpoints and their associated costs.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters:
   "invalid_keywords":
     "description": |-
-      List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
+      List of invalid keywords.
+      AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.
   "pii":
     "description": |-
       Configuration for guardrail PII filter.
@@ -2713,15 +2725,14 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame
       Indicates whether the safety filter is enabled.
   "valid_topics":
     "description": |-
-      The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
+      The list of allowed topics.
+      Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior:
   "behavior":
     "description": |-
-      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
+      Configuration for input guardrail filters.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior:
   "_":
-    "description": |-
-      Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
     "enum":
       - |-
         NONE
@@ -2737,30 +2748,32 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails:
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig:
   "catalog_name":
     "description": |-
-      The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.
+      The name of the catalog in Unity Catalog. Required when enabling inference tables.
+      NOTE: On update, you have to disable inference table first in order to change the catalog name.
   "enabled":
     "description": |-
       Indicates whether the inference table is enabled.
   "schema_name":
     "description": |-
-      The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.
+      The name of the schema in Unity Catalog. Required when enabling inference tables.
+      NOTE: On update, you have to disable inference table first in order to change the schema name.
   "table_name_prefix":
     "description": |-
-      The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.
+      The prefix of the table in Unity Catalog.
+      NOTE: On update, you have to disable inference table first in order to change the prefix name.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit:
   "calls":
     "description": |-
       Used to specify how many calls are allowed for a key within the renewal_period.
   "key":
     "description": |-
-      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
+      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,
+      with 'endpoint' being the default if not specified.
   "renewal_period":
     "description": |-
       Renewal period field for a rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
   "_":
-    "description": |-
-      Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -2768,8 +2781,6 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod:
   "_":
-    "description": |-
-      Renewal period field for a rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -2780,26 +2791,43 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingCo
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig:
   "aws_access_key_id":
     "description": |-
-      The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
+      The Databricks secret key reference for an AWS access key ID with
+      permissions to interact with Bedrock services. If you prefer to paste
+      your API key directly, see `aws_access_key_id_plaintext`. You must provide an API
+      key using one of the following fields: `aws_access_key_id` or
+      `aws_access_key_id_plaintext`.
   "aws_access_key_id_plaintext":
     "description": |-
-      An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.
+      An AWS access key ID with permissions to interact with Bedrock services
+      provided as a plaintext string. If you prefer to reference your key using
+      Databricks Secrets, see `aws_access_key_id`. You must provide an API key
+      using one of the following fields: `aws_access_key_id` or
+      `aws_access_key_id_plaintext`.
   "aws_region":
     "description": |-
       The AWS region to use. Bedrock has to be enabled there.
   "aws_secret_access_key":
     "description": |-
-      The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
+      The Databricks secret key reference for an AWS secret access key paired
+      with the access key ID, with permissions to interact with Bedrock
+      services. If you prefer to paste your API key directly, see
+      `aws_secret_access_key_plaintext`. You must provide an API key using one
+      of the following fields: `aws_secret_access_key` or
+      `aws_secret_access_key_plaintext`.
   "aws_secret_access_key_plaintext":
     "description": |-
-      An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.
+      An AWS secret access key paired with the access key ID, with permissions
+      to interact with Bedrock services provided as a plaintext string. If you
+      prefer to reference your key using Databricks Secrets, see
+      `aws_secret_access_key`. You must provide an API key using one of the
+      following fields: `aws_secret_access_key` or
+      `aws_secret_access_key_plaintext`.
   "bedrock_provider":
     "description": |-
-      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
+      The underlying provider in Amazon Bedrock. Supported values (case
+      insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
 github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider:
   "_":
-    "description": |-
-      The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
     "enum":
       - |-
         anthropic
@@ -2812,10 +2840,16 @@ github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedro
 github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig:
   "anthropic_api_key":
     "description": |-
-      The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Databricks secret key reference for an Anthropic API key. If you
+      prefer to paste your API key directly, see `anthropic_api_key_plaintext`.
+      You must provide an API key using one of the following fields:
+      `anthropic_api_key` or `anthropic_api_key_plaintext`.
   "anthropic_api_key_plaintext":
     "description": |-
-      The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.
+      The Anthropic API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `anthropic_api_key`. You
+      must provide an API key using one of the following fields:
+      `anthropic_api_key` or `anthropic_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
   "catalog_name":
     "description": |-
@@ -2831,42 +2865,58 @@ github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput:
       The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.
 github.com/databricks/databricks-sdk-go/service/serving.CohereConfig:
   "cohere_api_base":
-    "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n"
+    "description": |-
+      This is an optional field to provide a customized base URL for the Cohere
+      API. If left unspecified, the standard Cohere base URL is used.
   "cohere_api_key":
     "description": |-
-      The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
+      The Databricks secret key reference for a Cohere API key. If you prefer
+      to paste your API key directly, see `cohere_api_key_plaintext`. You must
+      provide an API key using one of the following fields: `cohere_api_key` or
+      `cohere_api_key_plaintext`.
   "cohere_api_key_plaintext":
     "description": |-
-      The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.
+      The Cohere API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `cohere_api_key`. You
+      must provide an API key using one of the following fields:
+      `cohere_api_key` or `cohere_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig:
   "databricks_api_token":
-    "description": |
-      The Databricks secret key reference for a Databricks API token that corresponds to a user or service
-      principal with Can Query access to the model serving endpoint pointed to by this external model.
-      If you prefer to paste your API key directly, see `databricks_api_token_plaintext`.
-      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |-
+      The Databricks secret key reference for a Databricks API token that
+      corresponds to a user or service principal with Can Query access to the
+      model serving endpoint pointed to by this external model. If you prefer
+      to paste your API key directly, see `databricks_api_token_plaintext`. You
+      must provide an API key using one of the following fields:
+      `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_api_token_plaintext":
-    "description": |
-      The Databricks API token that corresponds to a user or service
-      principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.
-      If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.
-      You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.
+    "description": |-
+      The Databricks API token that corresponds to a user or service principal
+      with Can Query access to the model serving endpoint pointed to by this
+      external model provided as a plaintext string. If you prefer to reference
+      your key using Databricks Secrets, see `databricks_api_token`. You must
+      provide an API key using one of the following fields:
+      `databricks_api_token` or `databricks_api_token_plaintext`.
   "databricks_workspace_url":
-    "description": |
-      The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.
+    "description": |-
+      The URL of the Databricks workspace containing the model serving endpoint
+      pointed to by this external model.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput:
   "auto_capture_config":
     "description": |-
       Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+      Note: this field is deprecated for creating new provisioned throughput endpoints,
+      or updating existing provisioned throughput endpoints that never have inference table configured;
+      in these cases please use AI Gateway to manage inference tables.
   "served_entities":
     "description": |-
-      A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.
+      The list of served entities under the serving endpoint config.
   "served_models":
     "description": |-
-      (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.
+      (Deprecated, use served_entities instead) The list of served models under the serving endpoint config.
   "traffic_config":
     "description": |-
-      The traffic config defining how invocations to the serving endpoint should be routed.
+      The traffic configuration associated with the serving endpoint config.
 github.com/databricks/databricks-sdk-go/service/serving.EndpointTag:
   "key":
     "description": |-
@@ -2903,17 +2953,13 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModel:
     "description": |-
       PaLM Config. Only required if the provider is 'palm'.
   "provider":
-    "description": |
-      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
-      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
+    "description": |-
+      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.
   "task":
     "description": |-
       The task type of the external model.
 github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
   "_":
-    "description": |
-      The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',
-      'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
     "enum":
       - |-
         ai21labs
@@ -2934,70 +2980,114 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider:
 github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig:
   "private_key":
     "description": |-
-      The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`
+      The Databricks secret key reference for a private key for the service
+      account which has access to the Google Cloud Vertex AI Service. See [Best
+      practices for managing service account keys]. If you prefer to paste your
+      API key directly, see `private_key_plaintext`. You must provide an API
+      key using one of the following fields: `private_key` or
+      `private_key_plaintext`
+
+      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
   "private_key_plaintext":
     "description": |-
-      The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.
+      The private key for the service account which has access to the Google
+      Cloud Vertex AI Service provided as a plaintext secret. See [Best
+      practices for managing service account keys]. If you prefer to reference
+      your key using Databricks Secrets, see `private_key`. You must provide an
+      API key using one of the following fields: `private_key` or
+      `private_key_plaintext`.
+
+      [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys
   "project_id":
     "description": |-
-      This is the Google Cloud project id that the service account is associated with.
+      This is the Google Cloud project id that the service account is
+      associated with.
   "region":
     "description": |-
-      This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.
+      This is the region for the Google Cloud Vertex AI Service. See [supported
+      regions] for more details. Some models are only available in specific
+      regions.
+
+      [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations
 github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig:
+  "_":
+    "description": |-
+      Configs needed to create an OpenAI model route.
   "microsoft_entra_client_id":
-    "description": |
-      This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.
+    "description": |-
+      This field is only required for Azure AD OpenAI and is the Microsoft
+      Entra Client ID.
   "microsoft_entra_client_secret":
-    "description": |
-      The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.
-      If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.
-      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
+    "description": |-
+      The Databricks secret key reference for a client secret used for
+      Microsoft Entra ID authentication. If you prefer to paste your client
+      secret directly, see `microsoft_entra_client_secret_plaintext`. You must
+      provide an API key using one of the following fields:
+      `microsoft_entra_client_secret` or
+      `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_client_secret_plaintext":
-    "description": |
-      The client secret used for Microsoft Entra ID authentication provided as a plaintext string.
-      If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.
-      You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.
+    "description": |-
+      The client secret used for Microsoft Entra ID authentication provided as
+      a plaintext string. If you prefer to reference your key using Databricks
+      Secrets, see `microsoft_entra_client_secret`. You must provide an API key
+      using one of the following fields: `microsoft_entra_client_secret` or
+      `microsoft_entra_client_secret_plaintext`.
   "microsoft_entra_tenant_id":
-    "description": |
-      This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.
+    "description": |-
+      This field is only required for Azure AD OpenAI and is the Microsoft
+      Entra Tenant ID.
   "openai_api_base":
-    "description": |
-      This is a field to provide a customized base URl for the OpenAI API.
-      For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service
-      provided by Azure.
-      For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.
+    "description": |-
+      This is a field to provide a customized base URl for the OpenAI API. For
+      Azure OpenAI, this field is required, and is the base URL for the Azure
+      OpenAI API service provided by Azure. For other OpenAI API types, this
+      field is optional, and if left unspecified, the standard OpenAI base URL
+      is used.
   "openai_api_key":
     "description": |-
-      The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The Databricks secret key reference for an OpenAI API key using the
+      OpenAI or Azure service. If you prefer to paste your API key directly,
+      see `openai_api_key_plaintext`. You must provide an API key using one of
+      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_key_plaintext":
     "description": |-
-      The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.
+      The OpenAI API key using the OpenAI or Azure service provided as a
+      plaintext string. If you prefer to reference your key using Databricks
+      Secrets, see `openai_api_key`. You must provide an API key using one of
+      the following fields: `openai_api_key` or `openai_api_key_plaintext`.
   "openai_api_type":
-    "description": |
-      This is an optional field to specify the type of OpenAI API to use.
-      For Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security
-      access validation protocol. For access token validation, use azure. For authentication using Azure Active
+    "description": |-
+      This is an optional field to specify the type of OpenAI API to use. For
+      Azure OpenAI, this field is required, and adjust this parameter to
+      represent the preferred security access validation protocol. For access
+      token validation, use azure. For authentication using Azure Active
       Directory (Azure AD) use, azuread.
   "openai_api_version":
-    "description": |
-      This is an optional field to specify the OpenAI API version.
-      For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to
-      utilize, specified by a date.
+    "description": |-
+      This is an optional field to specify the OpenAI API version. For Azure
+      OpenAI, this field is required, and is the version of the Azure OpenAI
+      service to utilize, specified by a date.
   "openai_deployment_name":
-    "description": |
-      This field is only required for Azure OpenAI and is the name of the deployment resource for the
-      Azure OpenAI service.
+    "description": |-
+      This field is only required for Azure OpenAI and is the name of the
+      deployment resource for the Azure OpenAI service.
   "openai_organization":
-    "description": |
-      This is an optional field to specify the organization in OpenAI or Azure OpenAI.
+    "description": |-
+      This is an optional field to specify the organization in OpenAI or Azure
+      OpenAI.
 github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig:
   "palm_api_key":
     "description": |-
-      The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
+      The Databricks secret key reference for a PaLM API key. If you prefer to
+      paste your API key directly, see `palm_api_key_plaintext`. You must
+      provide an API key using one of the following fields: `palm_api_key` or
+      `palm_api_key_plaintext`.
   "palm_api_key_plaintext":
     "description": |-
-      The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.
+      The PaLM API key provided as a plaintext string. If you prefer to
+      reference your key using Databricks Secrets, see `palm_api_key`. You must
+      provide an API key using one of the following fields: `palm_api_key` or
+      `palm_api_key_plaintext`.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
   "calls":
     "description": |-
@@ -3010,8 +3100,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimit:
       Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
   "_":
-    "description": |-
-      Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
     "enum":
       - |-
         user
@@ -3019,8 +3107,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey:
         endpoint
 github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod:
   "_":
-    "description": |-
-      Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
     "enum":
       - |-
         minute
@@ -3033,21 +3119,15 @@ github.com/databricks/databricks-sdk-go/service/serving.Route:
       The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.
 github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
   "entity_name":
-    "description": |
-      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),
-      or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of
-      __catalog_name__.__schema_name__.__model_name__.
-  "entity_version":
     "description": |-
-      The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.
+      The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.
+  "entity_version": {}
   "environment_vars":
-    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
+    "description": |-
+      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
   "external_model":
-    "description": |
-      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)
-      can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,
-      it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.
-      The task type of all external models within an endpoint must be the same.
+    "description": |-
+      The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.
   "instance_profile_arn":
     "description": |-
       ARN of the instance profile that the served entity uses to access AWS resources.
@@ -3058,68 +3138,46 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
   "name":
-    "description": |
-      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.
-      If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other
-      entities, it defaults to <entity-name>-<entity-version>.
+    "description": |-
+      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
   "scale_to_zero_enabled":
     "description": |-
       Whether the compute resources for the served entity should scale down to zero.
   "workload_size":
-    "description": |
-      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
+    "description": |-
+      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
   "workload_type":
-    "description": |
-      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |-
+      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
   "environment_vars":
-    "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`"
+    "description": |-
+      An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`
   "instance_profile_arn":
     "description": |-
-      ARN of the instance profile that the served model will use to access AWS resources.
+      ARN of the instance profile that the served entity uses to access AWS resources.
   "max_provisioned_throughput":
     "description": |-
       The maximum tokens per second that the endpoint can scale up to.
   "min_provisioned_throughput":
     "description": |-
       The minimum tokens per second that the endpoint can scale down to.
-  "model_name":
-    "description": |
-      The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,
-      in the form of __catalog_name__.__schema_name__.__model_name__.
-  "model_version":
-    "description": |-
-      The version of the model in Databricks Model Registry or Unity Catalog to be served.
+  "model_name": {}
+  "model_version": {}
   "name":
-    "description": |
-      The name of a served model. It must be unique across an endpoint. If not specified, this field will default to <model-name>-<model-version>.
-      A served model name can consist of alphanumeric characters, dashes, and underscores.
+    "description": |-
+      The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.
   "scale_to_zero_enabled":
     "description": |-
-      Whether the compute resources for the served model should scale down to zero.
+      Whether the compute resources for the served entity should scale down to zero.
   "workload_size":
-    "description": |
-      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
+    "description": |-
+      The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.
   "workload_type":
-    "description": |
-      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
+    "description": |-
+      The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize:
   "_":
-    "description": |
-      The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.
-      A single unit of provisioned concurrency can process one request at a time.
-      Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency).
-      If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
     "enum":
       - |-
         Small
@@ -3129,17 +3187,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkload
         Large
 github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType:
   "_":
-    "description": |
-      The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is
-      "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.
-      See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).
     "enum":
       - |-
         CPU
+      - |-
+        GPU_MEDIUM
       - |-
         GPU_SMALL
+      - |-
+        GPU_LARGE
+      - |-
+        MULTIGPU_MEDIUM
+github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType:
+  "_":
+    "enum":
+      - |-
+        CPU
       - |-
         GPU_MEDIUM
+      - |-
+        GPU_SMALL
       - |-
         GPU_LARGE
       - |-
diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml
index 120a12543..323432fa3 100644
--- a/bundle/internal/schema/annotations_openapi_overrides.yml
+++ b/bundle/internal/schema/annotations_openapi_overrides.yml
@@ -197,3 +197,14 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger:
   "manual":
     "description": |-
       PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput:
+  "entity_version":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
+  "model_name":
+    "description": |-
+      PLACEHOLDER
+  "model_version":
+    "description": |-
+      PLACEHOLDER
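
The overrides file above supplies hand-curated descriptions that take precedence over the OpenAPI-derived ones; a `PLACEHOLDER` entry keeps the key registered while the wording is filled in later. As a hedged illustration only (the actual text is maintained by the CLI authors), a completed override entry would follow the same shape as the existing ones in this file:

```yaml
# Hypothetical example: what one of the new PLACEHOLDER entries might look
# like once a curated description is added. Wording here is illustrative,
# not the project's actual text.
github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
  "model_name":
    "description": |-
      The registry or Unity Catalog name of the model to be served.
```
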
diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json
index 4a3b56814..17a621ba0 100644
--- a/bundle/schema/jsonschema.json
+++ b/bundle/schema/jsonschema.json
@@ -546,7 +546,7 @@
                     "type": "object",
                     "properties": {
                       "ai_gateway": {
-                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.",
+                        "description": "The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.",
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig"
                       },
                       "config": {
@@ -554,7 +554,7 @@
                         "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput"
                       },
                       "name": {
-                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.\n",
+                        "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.",
                         "$ref": "#/$defs/string"
                       },
                       "permissions": {
@@ -575,7 +575,6 @@
                     },
                     "additionalProperties": false,
                     "required": [
-                      "config",
                       "name"
                     ]
                   },
@@ -4142,6 +4141,10 @@
                     "parameters": {
                       "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
                       "$ref": "#/$defs/slice/string"
+                    },
+                    "run_as_repl": {
+                      "description": "Deprecated. A value of `false` is no longer supported.",
+                      "$ref": "#/$defs/bool"
                     }
                   },
                   "additionalProperties": false
@@ -5502,11 +5505,11 @@
                   "type": "object",
                   "properties": {
                     "ai21labs_api_key": {
-                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "ai21labs_api_key_plaintext": {
-                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
+                      "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5528,7 +5531,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails"
                     },
                     "inference_table_config": {
-                      "description": "Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
+                      "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig"
                     },
                     "rate_limits": {
@@ -5536,7 +5539,7 @@
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit"
                     },
                     "usage_tracking_config": {
-                      "description": "Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.",
+                      "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig"
                     }
                   },
@@ -5554,7 +5557,7 @@
                   "type": "object",
                   "properties": {
                     "invalid_keywords": {
-                      "description": "List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
+                      "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
                       "$ref": "#/$defs/slice/string"
                     },
                     "pii": {
@@ -5566,7 +5569,7 @@
                       "$ref": "#/$defs/bool"
                     },
                     "valid_topics": {
-                      "description": "The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
+                      "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
                       "$ref": "#/$defs/slice/string"
                     }
                   },
@@ -5584,14 +5587,11 @@
                   "type": "object",
                   "properties": {
                     "behavior": {
-                      "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
+                      "description": "Configuration for input guardrail filters.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior"
                     }
                   },
-                  "additionalProperties": false,
-                  "required": [
-                    "behavior"
-                  ]
+                  "additionalProperties": false
                 },
                 {
                   "type": "string",
@@ -5603,7 +5603,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
                   "enum": [
                     "NONE",
                     "BLOCK"
@@ -5643,7 +5642,7 @@
                   "type": "object",
                   "properties": {
                     "catalog_name": {
-                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.",
+                      "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.",
                       "$ref": "#/$defs/string"
                     },
                     "enabled": {
@@ -5651,11 +5650,11 @@
                       "$ref": "#/$defs/bool"
                     },
                     "schema_name": {
-                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.",
+                      "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.",
                       "$ref": "#/$defs/string"
                     },
                     "table_name_prefix": {
-                      "description": "The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.",
+                      "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5674,10 +5673,10 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int"
+                      "$ref": "#/$defs/int64"
                     },
                     "key": {
-                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
+                      "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey"
                     },
                     "renewal_period": {
@@ -5701,7 +5700,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -5717,7 +5715,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -5752,11 +5749,11 @@
                   "type": "object",
                   "properties": {
                     "aws_access_key_id": {
-                      "description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_access_key_id_plaintext": {
-                      "description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
+                      "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_region": {
@@ -5764,15 +5761,15 @@
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key": {
-                      "description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "aws_secret_access_key_plaintext": {
-                      "description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
+                      "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "bedrock_provider": {
-                      "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
+                      "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider"
                     }
                   },
@@ -5792,7 +5789,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
                   "enum": [
                     "anthropic",
                     "cohere",
@@ -5812,11 +5808,11 @@
                   "type": "object",
                   "properties": {
                     "anthropic_api_key": {
-                      "description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "anthropic_api_key_plaintext": {
-                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
+                      "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5864,15 +5860,15 @@
                   "type": "object",
                   "properties": {
                     "cohere_api_base": {
-                      "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n",
+                      "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key": {
-                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "cohere_api_key_plaintext": {
-                      "description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
+                      "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5890,15 +5886,15 @@
                   "type": "object",
                   "properties": {
                     "databricks_api_token": {
-                      "description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
+                      "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_api_token_plaintext": {
-                      "description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
+                      "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "databricks_workspace_url": {
-                      "description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n",
+                      "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -5919,19 +5915,19 @@
                   "type": "object",
                   "properties": {
                     "auto_capture_config": {
-                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
+                      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput"
                     },
                     "served_entities": {
-                      "description": "A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.",
+                      "description": "The list of served entities under the serving endpoint config.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput"
                     },
                     "served_models": {
-                      "description": "(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.",
+                      "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.",
                       "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput"
                     },
                     "traffic_config": {
-                      "description": "The traffic config defining how invocations to the serving endpoint should be routed.",
+                      "description": "The traffic configuration associated with the serving endpoint config.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig"
                     }
                   },
@@ -6010,7 +6006,7 @@
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig"
                     },
                     "provider": {
-                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
+                      "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider"
                     },
                     "task": {
@@ -6035,7 +6031,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
                   "enum": [
                     "ai21labs",
                     "anthropic",
@@ -6059,23 +6054,27 @@
                   "type": "object",
                   "properties": {
                     "private_key": {
-                      "description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`",
+                      "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
                       "$ref": "#/$defs/string"
                     },
                     "private_key_plaintext": {
-                      "description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.",
+                      "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys",
                       "$ref": "#/$defs/string"
                     },
                     "project_id": {
-                      "description": "This is the Google Cloud project id that the service account is associated with.",
+                      "description": "This is the Google Cloud project id that the service account is\nassociated with.",
                       "$ref": "#/$defs/string"
                     },
                     "region": {
-                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.",
+                      "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations",
                       "$ref": "#/$defs/string"
                     }
                   },
-                  "additionalProperties": false
+                  "additionalProperties": false,
+                  "required": [
+                    "project_id",
+                    "region"
+                  ]
                 },
                 {
                   "type": "string",
@@ -6087,49 +6086,50 @@
               "oneOf": [
                 {
                   "type": "object",
+                  "description": "Configs needed to create an OpenAI model route.",
                   "properties": {
                     "microsoft_entra_client_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret": {
-                      "description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
+                      "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_client_secret_plaintext": {
-                      "description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n",
+                      "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "microsoft_entra_tenant_id": {
-                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n",
+                      "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_base": {
-                      "description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n",
+                      "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key": {
-                      "description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_key_plaintext": {
-                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.",
+                      "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_type": {
-                      "description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n",
+                      "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_api_version": {
-                      "description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n",
+                      "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_deployment_name": {
-                      "description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n",
+                      "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.",
                       "$ref": "#/$defs/string"
                     },
                     "openai_organization": {
-                      "description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n",
+                      "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6147,11 +6147,11 @@
                   "type": "object",
                   "properties": {
                     "palm_api_key": {
-                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
+                      "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     },
                     "palm_api_key_plaintext": {
-                      "description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.",
+                      "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.",
                       "$ref": "#/$defs/string"
                     }
                   },
@@ -6170,7 +6170,7 @@
                   "properties": {
                     "calls": {
                       "description": "Used to specify how many calls are allowed for a key within the renewal_period.",
-                      "$ref": "#/$defs/int"
+                      "$ref": "#/$defs/int64"
                     },
                     "key": {
                       "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
@@ -6197,7 +6197,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
                   "enum": [
                     "user",
                     "endpoint"
@@ -6213,7 +6212,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.",
                   "enum": [
                     "minute"
                   ]
@@ -6256,19 +6254,18 @@
                   "type": "object",
                   "properties": {
                     "entity_name": {
-                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),\nor a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of\n__catalog_name__.__schema_name__.__model_name__.\n",
+                      "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.",
                       "$ref": "#/$defs/string"
                     },
                     "entity_version": {
-                      "description": "The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.",
                       "$ref": "#/$defs/string"
                     },
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "external_model": {
-                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
+                      "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel"
                     },
                     "instance_profile_arn": {
@@ -6284,7 +6281,7 @@
                       "$ref": "#/$defs/int"
                     },
                     "name": {
-                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.\nIf not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other\nentities, it defaults to \u003centity-name\u003e-\u003centity-version\u003e.\n",
+                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
@@ -6292,12 +6289,12 @@
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.\n",
+                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
                       "$ref": "#/$defs/string"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
-                      "$ref": "#/$defs/string"
+                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
+                      "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType"
                     }
                   },
                   "additionalProperties": false
@@ -6314,11 +6311,11 @@
                   "type": "object",
                   "properties": {
                     "environment_vars": {
-                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
+                      "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
                       "$ref": "#/$defs/map/string"
                     },
                     "instance_profile_arn": {
-                      "description": "ARN of the instance profile that the served model will use to access AWS resources.",
+                      "description": "ARN of the instance profile that the served entity uses to access AWS resources.",
                       "$ref": "#/$defs/string"
                     },
                     "max_provisioned_throughput": {
@@ -6330,27 +6327,25 @@
                       "$ref": "#/$defs/int"
                     },
                     "model_name": {
-                      "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n",
                       "$ref": "#/$defs/string"
                     },
                     "model_version": {
-                      "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served.",
                       "$ref": "#/$defs/string"
                     },
                     "name": {
-                      "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to \u003cmodel-name\u003e-\u003cmodel-version\u003e.\nA served model name can consist of alphanumeric characters, dashes, and underscores.\n",
+                      "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.",
                       "$ref": "#/$defs/string"
                     },
                     "scale_to_zero_enabled": {
-                      "description": "Whether the compute resources for the served model should scale down to zero.",
+                      "description": "Whether the compute resources for the served entity should scale down to zero.",
                       "$ref": "#/$defs/bool"
                     },
                     "workload_size": {
-                      "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
+                      "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize"
                     },
                     "workload_type": {
-                      "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
+                      "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).",
                       "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType"
                     }
                   },
@@ -6371,7 +6366,6 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
                   "enum": [
                     "Small",
                     "Medium",
@@ -6388,11 +6382,28 @@
               "oneOf": [
                 {
                   "type": "string",
-                  "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
                   "enum": [
                     "CPU",
-                    "GPU_SMALL",
                     "GPU_MEDIUM",
+                    "GPU_SMALL",
+                    "GPU_LARGE",
+                    "MULTIGPU_MEDIUM"
+                  ]
+                },
+                {
+                  "type": "string",
+                  "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
+                }
+              ]
+            },
+            "serving.ServingModelWorkloadType": {
+              "oneOf": [
+                {
+                  "type": "string",
+                  "enum": [
+                    "CPU",
+                    "GPU_MEDIUM",
+                    "GPU_SMALL",
                     "GPU_LARGE",
                     "MULTIGPU_MEDIUM"
                   ]
diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go
index 1eec1018e..43e458bc6 100755
--- a/cmd/account/custom-app-integration/custom-app-integration.go
+++ b/cmd/account/custom-app-integration/custom-app-integration.go
@@ -307,6 +307,7 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: array: redirect_urls
+	// TODO: array: scopes
 	// TODO: complex arg: token_access_policy
 
 	cmd.Use = "update INTEGRATION_ID"
diff --git a/cmd/api/api.go b/cmd/api/api.go
index c3a3eb0b6..fad8a026f 100644
--- a/cmd/api/api.go
+++ b/cmd/api/api.go
@@ -62,7 +62,7 @@ func makeCommand(method string) *cobra.Command {
 
 			var response any
 			headers := map[string]string{"Content-Type": "application/json"}
-			err = api.Do(cmd.Context(), method, path, headers, request, &response)
+			err = api.Do(cmd.Context(), method, path, headers, nil, request, &response)
 			if err != nil {
 				return err
 			}
diff --git a/cmd/workspace/access-control/access-control.go b/cmd/workspace/access-control/access-control.go
new file mode 100755
index 000000000..7668265fb
--- /dev/null
+++ b/cmd/workspace/access-control/access-control.go
@@ -0,0 +1,109 @@
+// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+package access_control
+
+import (
+	"fmt"
+
+	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/spf13/cobra"
+)
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var cmdOverrides []func(*cobra.Command)
+
+func New() *cobra.Command {
+	cmd := &cobra.Command{
+		Use:     "access-control",
+		Short:   `Rule based Access Control for Databricks Resources.`,
+		Long:    `Rule based Access Control for Databricks Resources.`,
+		GroupID: "iam",
+		Annotations: map[string]string{
+			"package": "iam",
+		},
+
+		// This service is being previewed; hide from help output.
+		Hidden: true,
+	}
+
+	// Add methods
+	cmd.AddCommand(newCheckPolicy())
+
+	// Apply optional overrides to this command.
+	for _, fn := range cmdOverrides {
+		fn(cmd)
+	}
+
+	return cmd
+}
+
+// start check-policy command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var checkPolicyOverrides []func(
+	*cobra.Command,
+	*iam.CheckPolicyRequest,
+)
+
+func newCheckPolicy() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var checkPolicyReq iam.CheckPolicyRequest
+	var checkPolicyJson flags.JsonFlag
+
+	// TODO: short flags
+	cmd.Flags().Var(&checkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	// TODO: complex arg: resource_info
+
+	cmd.Use = "check-policy"
+	cmd.Short = `Check access policy to a resource.`
+	cmd.Long = `Check access policy to a resource.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			diags := checkPolicyJson.Unmarshal(&checkPolicyReq)
+			if diags.HasError() {
+				return diags.Error()
+			}
+			if len(diags) > 0 {
+				err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags)
+				if err != nil {
+					return err
+				}
+			}
+		} else {
+			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+
+		response, err := w.AccessControl.CheckPolicy(ctx, checkPolicyReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range checkPolicyOverrides {
+		fn(cmd, &checkPolicyReq)
+	}
+
+	return cmd
+}
+
+// end service AccessControl
diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go
index f07d0cf76..c447bd736 100755
--- a/cmd/workspace/cmd.go
+++ b/cmd/workspace/cmd.go
@@ -3,6 +3,7 @@
 package workspace
 
 import (
+	access_control "github.com/databricks/cli/cmd/workspace/access-control"
 	alerts "github.com/databricks/cli/cmd/workspace/alerts"
 	alerts_legacy "github.com/databricks/cli/cmd/workspace/alerts-legacy"
 	apps "github.com/databricks/cli/cmd/workspace/apps"
@@ -96,6 +97,7 @@ import (
 func All() []*cobra.Command {
 	var out []*cobra.Command
 
+	out = append(out, access_control.New())
 	out = append(out, alerts.New())
 	out = append(out, alerts_legacy.New())
 	out = append(out, apps.New())
diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go
index 504beac5e..4d6262cff 100755
--- a/cmd/workspace/providers/providers.go
+++ b/cmd/workspace/providers/providers.go
@@ -64,7 +64,7 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`)
-	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
+	cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
 
 	cmd.Use = "create NAME AUTHENTICATION_TYPE"
 	cmd.Short = `Create an auth provider.`
@@ -430,7 +430,7 @@ func newUpdate() *cobra.Command {
 	cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`)
 	cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the provider.`)
 	cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`)
-	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`)
+	cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`)
 
 	cmd.Use = "update NAME"
 	cmd.Short = `Update a provider.`
diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go
index 56abd2014..6d6ce42f1 100755
--- a/cmd/workspace/recipients/recipients.go
+++ b/cmd/workspace/recipients/recipients.go
@@ -91,7 +91,7 @@ func newCreate() *cobra.Command {
 	cmd.Long = `Create a share recipient.
   
   Creates a new recipient with the delta sharing authentication type in the
-  metastore. The caller must be a metastore admin or has the
+  metastore. The caller must be a metastore admin or have the
   **CREATE_RECIPIENT** privilege on the metastore.
 
   Arguments:
@@ -186,28 +186,16 @@ func newDelete() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		deleteReq.Name = args[0]
 
 		err = w.Recipients.Delete(ctx, deleteReq)
@@ -258,28 +246,16 @@ func newGet() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		getReq.Name = args[0]
 
 		response, err := w.Recipients.Get(ctx, getReq)
@@ -384,7 +360,7 @@ func newRotateToken() *cobra.Command {
   the provided token info. The caller must be the owner of the recipient.
 
   Arguments:
-    NAME: The name of the recipient.
+    NAME: The name of the Recipient.
     EXISTING_TOKEN_EXPIRE_IN_SECONDS: The expiration time of the bearer token in ISO 8601 format. This will set
       the expiration_time of existing token only to a smaller timestamp, it
       cannot extend the expiration_time. Use 0 to expire the existing token
@@ -479,28 +455,16 @@ func newSharePermissions() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
 		w := root.WorkspaceClient(ctx)
 
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "The name of the Recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have the name of the recipient")
-		}
 		sharePermissionsReq.Name = args[0]
 
 		response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq)
@@ -560,6 +524,11 @@ func newUpdate() *cobra.Command {
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -577,30 +546,13 @@ func newUpdate() *cobra.Command {
 				}
 			}
 		}
-		if len(args) == 0 {
-			promptSpinner := cmdio.Spinner(ctx)
-			promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down."
-			names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{})
-			close(promptSpinner)
-			if err != nil {
-				return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err)
-			}
-			id, err := cmdio.Select(ctx, names, "Name of the recipient")
-			if err != nil {
-				return err
-			}
-			args = append(args, id)
-		}
-		if len(args) != 1 {
-			return fmt.Errorf("expected to have name of the recipient")
-		}
 		updateReq.Name = args[0]
 
-		err = w.Recipients.Update(ctx, updateReq)
+		response, err := w.Recipients.Update(ctx, updateReq)
 		if err != nil {
 			return err
 		}
-		return nil
+		return cmdio.Render(ctx, response)
 	}
 
 	// Disable completions since they are not applicable.
diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go
index cc99177c7..034133623 100755
--- a/cmd/workspace/serving-endpoints/serving-endpoints.go
+++ b/cmd/workspace/serving-endpoints/serving-endpoints.go
@@ -49,6 +49,7 @@ func New() *cobra.Command {
 	cmd.AddCommand(newGetOpenApi())
 	cmd.AddCommand(newGetPermissionLevels())
 	cmd.AddCommand(newGetPermissions())
+	cmd.AddCommand(newHttpRequest())
 	cmd.AddCommand(newList())
 	cmd.AddCommand(newLogs())
 	cmd.AddCommand(newPatch())
@@ -153,16 +154,34 @@ func newCreate() *cobra.Command {
 	cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
 
 	// TODO: complex arg: ai_gateway
+	// TODO: complex arg: config
 	// TODO: array: rate_limits
 	cmd.Flags().BoolVar(&createReq.RouteOptimized, "route-optimized", createReq.RouteOptimized, `Enable route optimization for the serving endpoint.`)
 	// TODO: array: tags
 
-	cmd.Use = "create"
+	cmd.Use = "create NAME"
 	cmd.Short = `Create a new serving endpoint.`
-	cmd.Long = `Create a new serving endpoint.`
+	cmd.Long = `Create a new serving endpoint.
+
+  Arguments:
+    NAME: The name of the serving endpoint. This field is required and must be
+      unique across a Databricks workspace. An endpoint name can consist of
+      alphanumeric characters, dashes, and underscores.`
 
 	cmd.Annotations = make(map[string]string)
 
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		if cmd.Flags().Changed("json") {
+			err := root.ExactArgs(0)(cmd, args)
+			if err != nil {
+				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input")
+			}
+			return nil
+		}
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
 	cmd.PreRunE = root.MustWorkspaceClient
 	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
 		ctx := cmd.Context()
@@ -179,8 +198,9 @@ func newCreate() *cobra.Command {
 					return err
 				}
 			}
-		} else {
-			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+		if !cmd.Flags().Changed("json") {
+			createReq.Name = args[0]
 		}
 
 		wait, err := w.ServingEndpoints.Create(ctx, createReq)
@@ -233,10 +253,7 @@ func newDelete() *cobra.Command {
 
 	cmd.Use = "delete NAME"
 	cmd.Short = `Delete a serving endpoint.`
-	cmd.Long = `Delete a serving endpoint.
-
-  Arguments:
-    NAME: The name of the serving endpoint. This field is required.`
+	cmd.Long = `Delete a serving endpoint.`
 
 	cmd.Annotations = make(map[string]string)
 
@@ -432,11 +449,12 @@ func newGetOpenApi() *cobra.Command {
 
 		getOpenApiReq.Name = args[0]
 
-		err = w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
+		response, err := w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq)
 		if err != nil {
 			return err
 		}
-		return nil
+		defer response.Contents.Close()
+		return cmdio.Render(ctx, response.Contents)
 	}
 
 	// Disable completions since they are not applicable.
@@ -568,6 +586,77 @@ func newGetPermissions() *cobra.Command {
 	return cmd
 }
 
+// start http-request command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var httpRequestOverrides []func(
+	*cobra.Command,
+	*serving.ExternalFunctionRequest,
+)
+
+func newHttpRequest() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var httpRequestReq serving.ExternalFunctionRequest
+
+	// TODO: short flags
+
+	cmd.Flags().StringVar(&httpRequestReq.Headers, "headers", httpRequestReq.Headers, `Additional headers for the request.`)
+	cmd.Flags().StringVar(&httpRequestReq.Json, "json", httpRequestReq.Json, `The JSON payload to send in the request body.`)
+	cmd.Flags().StringVar(&httpRequestReq.Params, "params", httpRequestReq.Params, `Query parameters for the request.`)
+
+	cmd.Use = "http-request CONNECTION_NAME METHOD PATH"
+	cmd.Short = `Make external services call using the credentials stored in UC Connection.`
+	cmd.Long = `Make external services call using the credentials stored in UC Connection.
+
+  Arguments:
+    CONNECTION_NAME: The connection name to use. This is required to identify the external
+      connection.
+    METHOD: The HTTP method to use (e.g., 'GET', 'POST').
+    PATH: The relative path for the API endpoint. This is required.`
+
+	// This command is being previewed; hide from help output.
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(3)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		httpRequestReq.ConnectionName = args[0]
+		_, err = fmt.Sscan(args[1], &httpRequestReq.Method)
+		if err != nil {
+			return fmt.Errorf("invalid METHOD: %s", args[1])
+		}
+		httpRequestReq.Path = args[2]
+
+		response, err := w.ServingEndpoints.HttpRequest(ctx, httpRequestReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range httpRequestOverrides {
+		fn(cmd, &httpRequestReq)
+	}
+
+	return cmd
+}
+
 // start list command
 
 // Slice with functions to override default command behavior.
@@ -849,7 +938,7 @@ func newPutAiGateway() *cobra.Command {
 	cmd.Long = `Update AI Gateway of a serving endpoint.
   
   Used to update the AI Gateway of a serving endpoint. NOTE: Only external model
-  endpoints are currently supported.
+  and provisioned throughput endpoints are currently supported.
 
   Arguments:
     NAME: The name of the serving endpoint whose AI Gateway is being updated. This
diff --git a/go.mod b/go.mod
index 930963f89..bd8997190 100644
--- a/go.mod
+++ b/go.mod
@@ -8,7 +8,7 @@ require (
 	github.com/BurntSushi/toml v1.4.0 // MIT
 	github.com/Masterminds/semver/v3 v3.3.1 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.56.1 // Apache 2.0
 	github.com/fatih/color v1.18.0 // MIT
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
 	github.com/hashicorp/go-version v1.7.0 // MPL 2.0
diff --git a/go.sum b/go.sum
index d025b3947..dec1d40b2 100644
--- a/go.sum
+++ b/go.sum
@@ -34,8 +34,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo=
 github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.55.0 h1:ReziD6spzTDltM0ml80LggKo27F3oUjgTinCFDJDnak=
-github.com/databricks/databricks-sdk-go v0.55.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
+github.com/databricks/databricks-sdk-go v0.56.1 h1:sgweTRvAQaI8EPrfDnVdAB0lNX6L5uTT720SlMMQI2U=
+github.com/databricks/databricks-sdk-go v0.56.1/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
diff --git a/integration/cmd/sync/sync_test.go b/integration/cmd/sync/sync_test.go
index 632497054..88e6ed89a 100644
--- a/integration/cmd/sync/sync_test.go
+++ b/integration/cmd/sync/sync_test.go
@@ -158,7 +158,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath, expected
 
 	var res []byte
 	a.c.Eventually(func() bool {
-		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res)
+		err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &res)
 		require.NoError(a.t, err)
 		actualContent := string(res)
 		return actualContent == expectedContent
diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go
index 88bbadd32..7102b6e29 100644
--- a/libs/filer/files_client.go
+++ b/libs/filer/files_client.go
@@ -148,7 +148,7 @@ func (w *FilesClient) Write(ctx context.Context, name string, reader io.Reader,
 	overwrite := slices.Contains(mode, OverwriteIfExists)
 	urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite)
 	headers := map[string]string{"Content-Type": "application/octet-stream"}
-	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil)
+	err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, nil, reader, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -176,7 +176,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err
 	}
 
 	var reader io.ReadCloser
-	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &reader)
+	err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &reader)
 
 	// Return early on success.
 	if err == nil {
diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go
index 8d5148edd..1d514f13b 100644
--- a/libs/filer/workspace_files_client.go
+++ b/libs/filer/workspace_files_client.go
@@ -106,7 +106,7 @@ func (info *wsfsFileInfo) MarshalJSON() ([]byte, error) {
 // as an interface to allow for mocking in tests.
 type apiClient interface {
 	Do(ctx context.Context, method, path string,
-		headers map[string]string, request, response any,
+		headers map[string]string, queryString map[string]any, request, response any,
 		visitors ...func(*http.Request) error) error
 }
 
@@ -156,7 +156,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io
 		return err
 	}
 
-	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil)
+	err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, nil, body, nil)
 
 	// Return early on success.
 	if err == nil {
@@ -341,6 +341,7 @@ func (w *WorkspaceFilesClient) Stat(ctx context.Context, name string) (fs.FileIn
 		http.MethodGet,
 		"/api/2.0/workspace/get-status",
 		nil,
+		nil,
 		map[string]string{
 			"path":               absPath,
 			"return_export_info": "true",
diff --git a/libs/filer/workspace_files_extensions_client_test.go b/libs/filer/workspace_files_extensions_client_test.go
index 9ea837fa9..e9fde4762 100644
--- a/libs/filer/workspace_files_extensions_client_test.go
+++ b/libs/filer/workspace_files_extensions_client_test.go
@@ -17,7 +17,7 @@ type mockApiClient struct {
 }
 
 func (m *mockApiClient) Do(ctx context.Context, method, path string,
-	headers map[string]string, request, response any,
+	headers map[string]string, queryString map[string]any, request, response any,
 	visitors ...func(*http.Request) error,
 ) error {
 	args := m.Called(ctx, method, path, headers, request, response, visitors)
diff --git a/libs/git/info.go b/libs/git/info.go
index 46e57be48..dc4af9b6d 100644
--- a/libs/git/info.go
+++ b/libs/git/info.go
@@ -66,6 +66,7 @@ func fetchRepositoryInfoAPI(ctx context.Context, path string, w *databricks.Work
 		http.MethodGet,
 		apiEndpoint,
 		nil,
+		nil,
 		map[string]string{
 			"path":            path,
 			"return_git_info": "true",

From 65fbbd9a7c75a2404fa3d4956560ab037535d779 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 14:22:08 +0100
Subject: [PATCH 25/39] libs/python: Remove DetectInterpreters (#2234)

## Changes
- Remove DetectInterpreters from the DetectExecutable call: python3 or
python should always be on the PATH. We don't need to detect
non-standard situations such as python3.10 being present while python3 is not.
- Move DetectInterpreters to cmd/labs, where it is still used.

This is a follow-up to https://github.com/databricks/cli/pull/2034
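
A minimal usage sketch of the resulting behavior (illustrative only, not part
of the change itself; after this patch DetectExecutable is a thin wrapper
around exec.LookPath for the default interpreter name, python3 or python):

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/cli/libs/python"
)

func main() {
	// DetectExecutable no longer scans PATH for python3.x binaries;
	// it only performs a lookup of the default interpreter name.
	py, err := python.DetectExecutable(context.Background())
	if err != nil {
		fmt.Println("python3 (or python) not found on PATH:", err)
		return
	}
	fmt.Println("using interpreter:", py)
}
```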

## Tests
Existing tests.
---
 cmd/labs/project/installer.go                 |  3 +--
 .../labs/project}/interpreters.go             |  2 +-
 .../labs/project}/interpreters_unix_test.go   |  2 +-
 .../labs/project}/interpreters_win_test.go    |  2 +-
 .../testdata/other-binaries-filtered/python   |  0
 .../other-binaries-filtered/python3-whatever  |  0
 .../other-binaries-filtered/python3.10        |  0
 .../other-binaries-filtered/python3.10.100    |  0
 .../other-binaries-filtered/python3.11        |  0
 .../other-binaries-filtered/python4.8         |  0
 .../testdata/other-binaries-filtered/python5  |  0
 .../testdata/other-binaries-filtered/python6  |  0
 .../testdata/other-binaries-filtered/python7  |  0
 .../testdata/other-binaries-filtered/pythonw  |  0
 .../other-binaries-filtered/real-python3.11.4 |  0
 .../testdata/other-binaries-filtered/whatever |  0
 .../testdata/world-writeable/python8.4        |  0
 libs/python/detect.go                         | 22 +------------------
 libs/python/detect_unix_test.go               | 12 ++--------
 libs/python/detect_win_test.go                |  2 +-
 20 files changed, 8 insertions(+), 37 deletions(-)
 rename {libs/python => cmd/labs/project}/interpreters.go (99%)
 rename {libs/python => cmd/labs/project}/interpreters_unix_test.go (99%)
 rename {libs/python => cmd/labs/project}/interpreters_win_test.go (97%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3-whatever (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.10 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.10.100 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.11 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python4.8 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python5 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python6 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python7 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/pythonw (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/real-python3.11.4 (100%)
 rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/whatever (100%)
 rename {libs/python => cmd/labs/project}/testdata/world-writeable/python8.4 (100%)

diff --git a/cmd/labs/project/installer.go b/cmd/labs/project/installer.go
index 7d31623bb..05f7d68aa 100644
--- a/cmd/labs/project/installer.go
+++ b/cmd/labs/project/installer.go
@@ -15,7 +15,6 @@ import (
 	"github.com/databricks/cli/libs/databrickscfg/profile"
 	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/cli/libs/process"
-	"github.com/databricks/cli/libs/python"
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/sql"
@@ -223,7 +222,7 @@ func (i *installer) setupPythonVirtualEnvironment(ctx context.Context, w *databr
 	feedback := cmdio.Spinner(ctx)
 	defer close(feedback)
 	feedback <- "Detecting all installed Python interpreters on the system"
-	pythonInterpreters, err := python.DetectInterpreters(ctx)
+	pythonInterpreters, err := DetectInterpreters(ctx)
 	if err != nil {
 		return fmt.Errorf("detect: %w", err)
 	}
diff --git a/libs/python/interpreters.go b/cmd/labs/project/interpreters.go
similarity index 99%
rename from libs/python/interpreters.go
rename to cmd/labs/project/interpreters.go
index 6071309a8..00f099ed4 100644
--- a/libs/python/interpreters.go
+++ b/cmd/labs/project/interpreters.go
@@ -1,4 +1,4 @@
-package python
+package project
 
 import (
 	"context"
diff --git a/libs/python/interpreters_unix_test.go b/cmd/labs/project/interpreters_unix_test.go
similarity index 99%
rename from libs/python/interpreters_unix_test.go
rename to cmd/labs/project/interpreters_unix_test.go
index 57adc9279..a5bbb6468 100644
--- a/libs/python/interpreters_unix_test.go
+++ b/cmd/labs/project/interpreters_unix_test.go
@@ -1,6 +1,6 @@
 //go:build unix
 
-package python
+package project
 
 import (
 	"context"
diff --git a/libs/python/interpreters_win_test.go b/cmd/labs/project/interpreters_win_test.go
similarity index 97%
rename from libs/python/interpreters_win_test.go
rename to cmd/labs/project/interpreters_win_test.go
index f99981529..2316daa30 100644
--- a/libs/python/interpreters_win_test.go
+++ b/cmd/labs/project/interpreters_win_test.go
@@ -1,6 +1,6 @@
 //go:build windows
 
-package python
+package project
 
 import (
 	"context"
diff --git a/libs/python/testdata/other-binaries-filtered/python b/cmd/labs/project/testdata/other-binaries-filtered/python
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python
rename to cmd/labs/project/testdata/other-binaries-filtered/python
diff --git a/libs/python/testdata/other-binaries-filtered/python3-whatever b/cmd/labs/project/testdata/other-binaries-filtered/python3-whatever
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python3-whatever
rename to cmd/labs/project/testdata/other-binaries-filtered/python3-whatever
diff --git a/libs/python/testdata/other-binaries-filtered/python3.10 b/cmd/labs/project/testdata/other-binaries-filtered/python3.10
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python3.10
rename to cmd/labs/project/testdata/other-binaries-filtered/python3.10
diff --git a/libs/python/testdata/other-binaries-filtered/python3.10.100 b/cmd/labs/project/testdata/other-binaries-filtered/python3.10.100
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python3.10.100
rename to cmd/labs/project/testdata/other-binaries-filtered/python3.10.100
diff --git a/libs/python/testdata/other-binaries-filtered/python3.11 b/cmd/labs/project/testdata/other-binaries-filtered/python3.11
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python3.11
rename to cmd/labs/project/testdata/other-binaries-filtered/python3.11
diff --git a/libs/python/testdata/other-binaries-filtered/python4.8 b/cmd/labs/project/testdata/other-binaries-filtered/python4.8
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python4.8
rename to cmd/labs/project/testdata/other-binaries-filtered/python4.8
diff --git a/libs/python/testdata/other-binaries-filtered/python5 b/cmd/labs/project/testdata/other-binaries-filtered/python5
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python5
rename to cmd/labs/project/testdata/other-binaries-filtered/python5
diff --git a/libs/python/testdata/other-binaries-filtered/python6 b/cmd/labs/project/testdata/other-binaries-filtered/python6
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python6
rename to cmd/labs/project/testdata/other-binaries-filtered/python6
diff --git a/libs/python/testdata/other-binaries-filtered/python7 b/cmd/labs/project/testdata/other-binaries-filtered/python7
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/python7
rename to cmd/labs/project/testdata/other-binaries-filtered/python7
diff --git a/libs/python/testdata/other-binaries-filtered/pythonw b/cmd/labs/project/testdata/other-binaries-filtered/pythonw
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/pythonw
rename to cmd/labs/project/testdata/other-binaries-filtered/pythonw
diff --git a/libs/python/testdata/other-binaries-filtered/real-python3.11.4 b/cmd/labs/project/testdata/other-binaries-filtered/real-python3.11.4
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/real-python3.11.4
rename to cmd/labs/project/testdata/other-binaries-filtered/real-python3.11.4
diff --git a/libs/python/testdata/other-binaries-filtered/whatever b/cmd/labs/project/testdata/other-binaries-filtered/whatever
similarity index 100%
rename from libs/python/testdata/other-binaries-filtered/whatever
rename to cmd/labs/project/testdata/other-binaries-filtered/whatever
diff --git a/libs/python/testdata/world-writeable/python8.4 b/cmd/labs/project/testdata/world-writeable/python8.4
similarity index 100%
rename from libs/python/testdata/world-writeable/python8.4
rename to cmd/labs/project/testdata/world-writeable/python8.4
diff --git a/libs/python/detect.go b/libs/python/detect.go
index e86d9d621..75158da65 100644
--- a/libs/python/detect.go
+++ b/libs/python/detect.go
@@ -39,27 +39,7 @@ func DetectExecutable(ctx context.Context) (string, error) {
 	//
 	// See https://github.com/pyenv/pyenv#understanding-python-version-selection
 
-	out, err := exec.LookPath(GetExecutable())
-
-	// most of the OS'es have python3 in $PATH, but for those which don't,
-	// we perform the latest version lookup
-	if err != nil && !errors.Is(err, exec.ErrNotFound) {
-		return "", err
-	}
-	if out != "" {
-		return out, nil
-	}
-	// otherwise, detect all interpreters and pick the least that satisfies
-	// minimal version requirements
-	all, err := DetectInterpreters(ctx)
-	if err != nil {
-		return "", err
-	}
-	interpreter, err := all.AtLeast("3.8")
-	if err != nil {
-		return "", err
-	}
-	return interpreter.Path, nil
+	return exec.LookPath(GetExecutable())
 }
 
 // DetectVEnvExecutable returns the path to the python3 executable inside venvPath,
diff --git a/libs/python/detect_unix_test.go b/libs/python/detect_unix_test.go
index a962e1f55..1774aa108 100644
--- a/libs/python/detect_unix_test.go
+++ b/libs/python/detect_unix_test.go
@@ -16,24 +16,16 @@ func TestDetectsViaPathLookup(t *testing.T) {
 	assert.NotEmpty(t, py)
 }
 
-func TestDetectsViaListing(t *testing.T) {
-	t.Setenv("PATH", "testdata/other-binaries-filtered")
-	ctx := context.Background()
-	py, err := DetectExecutable(ctx)
-	assert.NoError(t, err)
-	assert.Equal(t, "testdata/other-binaries-filtered/python3.10", py)
-}
-
 func TestDetectFailsNoInterpreters(t *testing.T) {
 	t.Setenv("PATH", "testdata")
 	ctx := context.Background()
 	_, err := DetectExecutable(ctx)
-	assert.Equal(t, ErrNoPythonInterpreters, err)
+	assert.Error(t, err)
 }
 
 func TestDetectFailsNoMinimalVersion(t *testing.T) {
 	t.Setenv("PATH", "testdata/no-python3")
 	ctx := context.Background()
 	_, err := DetectExecutable(ctx)
-	assert.EqualError(t, err, "cannot find Python greater or equal to v3.8.0")
+	assert.Error(t, err)
 }
diff --git a/libs/python/detect_win_test.go b/libs/python/detect_win_test.go
index 2ef811a4b..7b2ee281e 100644
--- a/libs/python/detect_win_test.go
+++ b/libs/python/detect_win_test.go
@@ -20,5 +20,5 @@ func TestDetectFailsNoInterpreters(t *testing.T) {
 	t.Setenv("PATH", "testdata")
 	ctx := context.Background()
 	_, err := DetectExecutable(ctx)
-	assert.ErrorIs(t, err, ErrNoPythonInterpreters)
+	assert.Error(t, err)
 }

From 52bf7e388a80beb95d248dc623cfda3cf5d5e137 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 15:25:56 +0100
Subject: [PATCH 26/39] acc: Propagate user's UV_CACHE_DIR to tests (#2239)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

There is a speed-up of 0.5s, but the run still takes 4.4s, so something
else is slow there.

Benchmarking bundle/templates/experimental-jobs-as-code:

```
# Without UV_CACHE_DIR
~/work/cli/acceptance/bundle/templates/experimental-jobs-as-code % hyperfine --warmup 2 'testme -count=1'
Benchmark 1: testme -count=1
  Time (mean ± σ):      4.950 s ±  0.079 s    [User: 2.730 s, System: 8.524 s]
  Range (min … max):    4.838 s …  5.076 s    10 runs

# With UV_CACHE_DIR
~/work/cli/acceptance/bundle/templates/experimental-jobs-as-code % hyperfine --warmup 2 'testme -count=1'
Benchmark 1: testme -count=1
  Time (mean ± σ):      4.410 s ±  0.049 s    [User: 2.669 s, System: 8.710 s]
  Range (min … max):    4.324 s …  4.467 s    10 runs
```
---
 acceptance/acceptance_test.go                   | 17 +++++++++++++++++
 .../templates/experimental-jobs-as-code/script  |  2 +-
 2 files changed, 18 insertions(+), 1 deletion(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index e48bd9908..47295b47a 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -100,6 +100,10 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	// Prevent CLI from downloading terraform in each test:
 	t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
 
+	// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
+	uvCache := getUVDefaultCacheDir(t)
+	t.Setenv("UV_CACHE_DIR", uvCache)
+
 	ctx := context.Background()
 	cloudEnv := os.Getenv("CLOUD_ENV")
 
@@ -486,3 +490,16 @@ func ListDir(t *testing.T, src string) []string {
 	}
 	return files
 }
+
+func getUVDefaultCacheDir(t *testing.T) string {
+	// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
+	// the default cache directory is
+	// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
+	cacheDir, err := os.UserCacheDir()
+	require.NoError(t, err)
+	if runtime.GOOS == "windows" {
+		return cacheDir + "\\uv\\cache"
+	} else {
+		return cacheDir + "/uv"
+	}
+}
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/script b/acceptance/bundle/templates/experimental-jobs-as-code/script
index af28b9d0a..0223b3326 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/script
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/script
@@ -3,7 +3,7 @@ trace $CLI bundle init experimental-jobs-as-code --config-file ./input.json --ou
 cd output/my_jobs_as_code
 
 # silence uv output because it's non-deterministic
-uv sync 2> /dev/null
+uv sync -q
 
 # remove version constraint because it always creates a warning on dev builds
 cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new

From 67d1413db5b84df6643f3c1571abae13da14c6e2 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 15:34:53 +0100
Subject: [PATCH 27/39] Add default regex for DEV_VERSION (#2241)

## Changes

- Replace the development version in test output with $DEV_VERSION.
- Update experimental-jobs-as-code to make use of it.
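
For illustration, a minimal standalone sketch of the replacement this adds
(the regex and placeholder mirror the libs/testdiff change below; the main
scaffolding is only for demonstration):

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Matches "0.0.0-dev+21e1aacf518a" as well as plain "0.0.0-dev".
	devVersionRegex := regexp.MustCompile(`0\.0\.0-dev(\+[a-f0-9]{10,16})?`)

	out := devVersionRegex.ReplaceAllString(
		"Databricks CLI v0.0.0-dev+21e1aacf518a",
		"$$DEV_VERSION", // "$$" escapes "$", yielding a literal "$DEV_VERSION"
	)
	fmt.Println(out) // Databricks CLI v$DEV_VERSION
}
```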

## Tests
- Existing tests.
- Using this in https://github.com/databricks/cli/pull/2213
---
 acceptance/acceptance_test.go                              | 1 +
 .../bundle/templates/experimental-jobs-as-code/output.txt  | 2 ++
 .../output/my_jobs_as_code/databricks.yml                  | 1 +
 .../bundle/templates/experimental-jobs-as-code/script      | 4 ----
 acceptance/selftest/output.txt                             | 4 ++++
 acceptance/selftest/script                                 | 3 +++
 libs/testdiff/replacement.go                               | 7 +++++++
 7 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 47295b47a..5eb08f674 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -128,6 +128,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	testdiff.PrepareReplacementsUser(t, &repls, *user)
 	testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
 	testdiff.PrepareReplacementsUUID(t, &repls)
+	testdiff.PrepareReplacementsDevVersion(t, &repls)
 
 	testDirs := getTests(t)
 	require.NotEmpty(t, testDirs)
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt
index 1aa8a94d5..10aca003e 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt
@@ -10,6 +10,8 @@ Please refer to the README.md file for "getting started" instructions.
 See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
 
 >>> $CLI bundle validate -t dev --output json
+Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION
+
 {
   "jobs": {
     "my_jobs_as_code_job": {
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
index fd87aa381..a1a93d95c 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
@@ -3,6 +3,7 @@
 bundle:
   name: my_jobs_as_code
   uuid: <UUID>
+  databricks_cli_version: ">= 0.238.0"
 
 experimental:
   python:
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/script b/acceptance/bundle/templates/experimental-jobs-as-code/script
index 0223b3326..10188aabd 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/script
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/script
@@ -5,10 +5,6 @@ cd output/my_jobs_as_code
 # silence uv output because it's non-deterministic
 uv sync -q
 
-# remove version constraint because it always creates a warning on dev builds
-cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new
-mv databricks.yml.new databricks.yml
-
 trace $CLI bundle validate -t dev --output json | jq ".resources"
 
 rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info
diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt
index 9fdfbc1e7..91aa8c33e 100644
--- a/acceptance/selftest/output.txt
+++ b/acceptance/selftest/output.txt
@@ -33,3 +33,7 @@ $TMPDIR/subdir/a/b/c
 1234
 CUSTOM_NUMBER_REGEX
 123456
+
+=== Testing --version
+>>> $CLI --version
+Databricks CLI v$DEV_VERSION
diff --git a/acceptance/selftest/script b/acceptance/selftest/script
index 665726167..bccf30e71 100644
--- a/acceptance/selftest/script
+++ b/acceptance/selftest/script
@@ -24,3 +24,6 @@ printf "\n=== Custom regex can be specified in [[Repl]] section\n"
 echo 1234
 echo 12345
 echo 123456
+
+printf "\n=== Testing --version"
+trace $CLI --version
diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go
index b512374a3..40e7e72b4 100644
--- a/libs/testdiff/replacement.go
+++ b/libs/testdiff/replacement.go
@@ -23,6 +23,8 @@ var (
 	uuidRegex        = regexp.MustCompile(`[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}`)
 	numIdRegex       = regexp.MustCompile(`[0-9]{3,}`)
 	privatePathRegex = regexp.MustCompile(`(/tmp|/private)(/.*)/([a-zA-Z0-9]+)`)
+	// Version could v0.0.0-dev+21e1aacf518a or just v0.0.0-dev (the latter is currently the case on Windows)
+	devVersionRegex = regexp.MustCompile(`0\.0\.0-dev(\+[a-f0-9]{10,16})?`)
 )
 
 type Replacement struct {
@@ -211,3 +213,8 @@ func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsC
 	t.Helper()
 	r.append(privatePathRegex, "/tmp/.../$3")
 }
+
+func PrepareReplacementsDevVersion(t testutil.TestingT, r *ReplacementsContext) {
+	t.Helper()
+	r.append(devVersionRegex, "$$DEV_VERSION")
+}

From be908ee1a17abe36c573a24ac83033243c154379 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Mon, 27 Jan 2025 16:28:33 +0100
Subject: [PATCH 28/39] Add acceptance test for 'experimental.scripts' (#2240)

---
 acceptance/bundle/scripts/databricks.yml | 11 +++++
 acceptance/bundle/scripts/myscript.py    |  8 ++++
 acceptance/bundle/scripts/output.txt     | 52 ++++++++++++++++++++++++
 acceptance/bundle/scripts/script         |  3 ++
 acceptance/server_test.go                |  4 ++
 bundle/scripts/scripts_test.go           | 51 -----------------------
 6 files changed, 78 insertions(+), 51 deletions(-)
 create mode 100644 acceptance/bundle/scripts/databricks.yml
 create mode 100644 acceptance/bundle/scripts/myscript.py
 create mode 100644 acceptance/bundle/scripts/output.txt
 create mode 100644 acceptance/bundle/scripts/script
 delete mode 100644 bundle/scripts/scripts_test.go

diff --git a/acceptance/bundle/scripts/databricks.yml b/acceptance/bundle/scripts/databricks.yml
new file mode 100644
index 000000000..6421e2b59
--- /dev/null
+++ b/acceptance/bundle/scripts/databricks.yml
@@ -0,0 +1,11 @@
+bundle:
+  name: scripts
+
+experimental:
+  scripts:
+    preinit: "python3 ./myscript.py $EXITCODE preinit"
+    postinit: "python3 ./myscript.py 0 postinit"
+    prebuild: "python3 ./myscript.py 0 prebuild"
+    postbuild: "python3 ./myscript.py 0 postbuild"
+    predeploy: "python3 ./myscript.py 0 predeploy"
+    postdeploy: "python3 ./myscript.py 0 postdeploy"
diff --git a/acceptance/bundle/scripts/myscript.py b/acceptance/bundle/scripts/myscript.py
new file mode 100644
index 000000000..d10f497e1
--- /dev/null
+++ b/acceptance/bundle/scripts/myscript.py
@@ -0,0 +1,8 @@
+import sys
+
+info = " ".join(sys.argv[1:])
+sys.stderr.write(f"from myscript.py {info}: hello stderr!\n")
+sys.stdout.write(f"from myscript.py {info}: hello stdout!\n")
+
+exitcode = int(sys.argv[1])
+sys.exit(exitcode)
diff --git a/acceptance/bundle/scripts/output.txt b/acceptance/bundle/scripts/output.txt
new file mode 100644
index 000000000..ec5978380
--- /dev/null
+++ b/acceptance/bundle/scripts/output.txt
@@ -0,0 +1,52 @@
+
+>>> EXITCODE=0 errcode $CLI bundle validate
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Name: scripts
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/scripts/default
+
+Validation OK!
+
+>>> EXITCODE=1 errcode $CLI bundle validate
+Executing 'preinit' script
+from myscript.py 1 preinit: hello stdout!
+from myscript.py 1 preinit: hello stderr!
+Error: failed to execute script: exit status 1
+
+Name: scripts
+
+Found 1 error
+
+Exit code: 1
+
+>>> EXITCODE=0 errcode $CLI bundle deploy
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Executing 'prebuild' script
+from myscript.py 0 prebuild: hello stdout!
+from myscript.py 0 prebuild: hello stderr!
+Executing 'postbuild' script
+from myscript.py 0 postbuild: hello stdout!
+from myscript.py 0 postbuild: hello stderr!
+Executing 'predeploy' script
+from myscript.py 0 predeploy: hello stdout!
+from myscript.py 0 predeploy: hello stderr!
+Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME.
+Please make sure the current user or one of their groups is listed under the permissions of this bundle.
+For assistance, contact the owners of this project.
+They may need to redeploy the bundle to apply the new permissions.
+Please refer to https://docs.databricks.com/dev-tools/bundles/permissions.html for more on managing permissions.
+
+
+Exit code: 1
diff --git a/acceptance/bundle/scripts/script b/acceptance/bundle/scripts/script
new file mode 100644
index 000000000..de07d277e
--- /dev/null
+++ b/acceptance/bundle/scripts/script
@@ -0,0 +1,3 @@
+trace EXITCODE=0 errcode $CLI bundle validate
+trace EXITCODE=1 errcode $CLI bundle validate
+trace EXITCODE=0 errcode $CLI bundle deploy
diff --git a/acceptance/server_test.go b/acceptance/server_test.go
index eb8cbb24a..dbc55c03f 100644
--- a/acceptance/server_test.go
+++ b/acceptance/server_test.go
@@ -146,4 +146,8 @@ func AddHandlers(server *TestServer) {
 			},
 		}, nil
 	})
+
+	server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, error) {
+		return "{}", nil
+	})
 }
diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go
deleted file mode 100644
index 0c92bc2c3..000000000
--- a/bundle/scripts/scripts_test.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package scripts
-
-import (
-	"bufio"
-	"context"
-	"strings"
-	"testing"
-
-	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/libs/exec"
-	"github.com/stretchr/testify/require"
-)
-
-func TestExecutesHook(t *testing.T) {
-	b := &bundle.Bundle{
-		Config: config.Root{
-			Experimental: &config.Experimental{
-				Scripts: map[config.ScriptHook]config.Command{
-					config.ScriptPreBuild: "echo 'Hello'",
-				},
-			},
-		},
-	}
-
-	executor, err := exec.NewCommandExecutor(b.BundleRootPath)
-	require.NoError(t, err)
-	_, out, err := executeHook(context.Background(), executor, b, config.ScriptPreBuild)
-	require.NoError(t, err)
-
-	reader := bufio.NewReader(out)
-	line, err := reader.ReadString('\n')
-
-	require.NoError(t, err)
-	require.Equal(t, "Hello", strings.TrimSpace(line))
-}
-
-func TestExecuteMutator(t *testing.T) {
-	b := &bundle.Bundle{
-		Config: config.Root{
-			Experimental: &config.Experimental{
-				Scripts: map[config.ScriptHook]config.Command{
-					config.ScriptPreBuild: "echo 'Hello'",
-				},
-			},
-		},
-	}
-
-	diags := bundle.Apply(context.Background(), b, Execute(config.ScriptPreInit))
-	require.NoError(t, diags.Error())
-}

From 60709e3d48a711b931d341196120f4450ee78499 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 11:15:32 +0100
Subject: [PATCH 29/39] acc: Restore unexpected output error (#2243)

## Changes
Restore the original behaviour of acceptance tests: any unaccounted-for
files trigger an error (not just those that start with "out"). This was
changed in
https://github.com/databricks/cli/pull/2146/files#diff-2bb968d823f4afb825e1dcea2879bdbdedf2b7c15d4e77f47905691b14246a04L196
which started checking only files starting with "out*" and skipping
everything else.

## Tests
Existing tests.
---
 acceptance/acceptance_test.go                   | 1 +
 acceptance/bundle/git-permerror/script          | 3 ++-
 acceptance/bundle/syncroot/dotdot-git/script    | 4 +++-
 acceptance/bundle/syncroot/dotdot-git/test.toml | 3 +++
 4 files changed, 9 insertions(+), 2 deletions(-)
 create mode 100644 acceptance/bundle/syncroot/dotdot-git/test.toml

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 5eb08f674..2d67fb269 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -245,6 +245,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		if _, ok := outputs[relPath]; ok {
 			continue
 		}
+		t.Errorf("Unexpected output: %s", relPath)
 		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
 			// Show the contents & support overwrite mode for it:
diff --git a/acceptance/bundle/git-permerror/script b/acceptance/bundle/git-permerror/script
index 782cbf5bc..3a9b4db24 100644
--- a/acceptance/bundle/git-permerror/script
+++ b/acceptance/bundle/git-permerror/script
@@ -22,4 +22,5 @@ trace chmod 000 .git/config
 errcode trace $CLI bundle validate -o json | jq .bundle.git
 errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
 
-rm -fr .git
+cd ..
+rm -fr myrepo
diff --git a/acceptance/bundle/syncroot/dotdot-git/script b/acceptance/bundle/syncroot/dotdot-git/script
index 0706a1d5e..278e77101 100644
--- a/acceptance/bundle/syncroot/dotdot-git/script
+++ b/acceptance/bundle/syncroot/dotdot-git/script
@@ -3,4 +3,6 @@ mkdir myrepo
 cd myrepo
 cp ../databricks.yml .
 git-repo-init
-$CLI bundle validate | sed 's/\\\\/\//g'
+errcode $CLI bundle validate
+cd ..
+rm -fr myrepo
diff --git a/acceptance/bundle/syncroot/dotdot-git/test.toml b/acceptance/bundle/syncroot/dotdot-git/test.toml
new file mode 100644
index 000000000..f57f83ee4
--- /dev/null
+++ b/acceptance/bundle/syncroot/dotdot-git/test.toml
@@ -0,0 +1,3 @@
+[[Repls]]
+Old = '\\\\myrepo'
+New = '/myrepo'

From 11436faafe5361bd390fa04dc699807e31db6144 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 11:22:29 +0100
Subject: [PATCH 30/39] acc: Avoid reading and applying replacements on large
 files; validate utf8 (#2244)

## Changes
- Do not start replacement / comparison if the file is too large or not
valid UTF-8.
- This helps prevent applying replacements to an accidentally included
large binary (e.g. terraform).

## Tests
Found this problem when working on
https://github.com/databricks/cli/pull/2242 -- the tests tried to
apply replacements to the terraform binary and crashed. With this change,
an error is reported instead.
---
 acceptance/acceptance_test.go | 51 ++++++++++++++++++++++++-----------
 1 file changed, 35 insertions(+), 16 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 2d67fb269..877c7239d 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -15,6 +15,7 @@ import (
 	"strings"
 	"testing"
 	"time"
+	"unicode/utf8"
 
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
@@ -44,6 +45,7 @@ const (
 	EntryPointScript = "script"
 	CleanupScript    = "script.cleanup"
 	PrepareScript    = "script.prepare"
+	MaxFileSize      = 100_000
 )
 
 var Scripts = map[string]bool{
@@ -257,15 +259,15 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
 	pathRef := filepath.Join(dirRef, relPath)
 	pathNew := filepath.Join(dirNew, relPath)
-	bufRef, okRef := readIfExists(t, pathRef)
-	bufNew, okNew := readIfExists(t, pathNew)
+	bufRef, okRef := tryReading(t, pathRef)
+	bufNew, okNew := tryReading(t, pathNew)
 	if !okRef && !okNew {
-		t.Errorf("Both files are missing: %s, %s", pathRef, pathNew)
+		t.Errorf("Both files are missing or have errors: %s, %s", pathRef, pathNew)
 		return
 	}
 
-	valueRef := testdiff.NormalizeNewlines(string(bufRef))
-	valueNew := testdiff.NormalizeNewlines(string(bufNew))
+	valueRef := testdiff.NormalizeNewlines(bufRef)
+	valueNew := testdiff.NormalizeNewlines(bufNew)
 
 	// Apply replacements to the new value only.
 	// The reference value is stored after applying replacements.
@@ -323,14 +325,14 @@ func readMergedScriptContents(t *testing.T, dir string) string {
 	cleanups := []string{}
 
 	for {
-		x, ok := readIfExists(t, filepath.Join(dir, CleanupScript))
+		x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
 		if ok {
-			cleanups = append(cleanups, string(x))
+			cleanups = append(cleanups, x)
 		}
 
-		x, ok = readIfExists(t, filepath.Join(dir, PrepareScript))
+		x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
 		if ok {
-			prepares = append(prepares, string(x))
+			prepares = append(prepares, x)
 		}
 
 		if dir == "" || dir == "." {
@@ -417,16 +419,33 @@ func formatOutput(w io.Writer, err error) {
 	}
 }
 
-func readIfExists(t *testing.T, path string) ([]byte, bool) {
-	data, err := os.ReadFile(path)
-	if err == nil {
-		return data, true
+func tryReading(t *testing.T, path string) (string, bool) {
+	info, err := os.Stat(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			t.Errorf("%s: %s", path, err)
+		}
+		return "", false
 	}
 
-	if !errors.Is(err, os.ErrNotExist) {
-		t.Fatalf("%s: %s", path, err)
+	if info.Size() > MaxFileSize {
+		t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+		return "", false
 	}
-	return []byte{}, false
+
+	data, err := os.ReadFile(path)
+	if err != nil {
+		// already checked ErrNotExist above
+		t.Errorf("%s: %s", path, err)
+		return "", false
+	}
+
+	if !utf8.Valid(data) {
+		t.Errorf("%s: not valid utf-8", path)
+		return "", false
+	}
+
+	return string(data), true
 }
 
 func CopyDir(src, dst string, inputs, outputs map[string]bool) error {

From 3ffac800071a397763bddb49e22c1aca4f55573c Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 11:23:44 +0100
Subject: [PATCH 31/39] acc: Use real terraform when CLOUD_ENV is set (#2245)

## Changes
- If CLOUD_ENV is set, do not override DATABRICKS_TF_EXEC_PATH with a dummy
value; see the sketch below. This allows running acceptance tests as
integration tests.
- Needed for https://github.com/databricks/cli/pull/2242
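
A sketch of the resulting setup logic, using a hypothetical helper (the actual change lives inside `testAccept` in the diff below):

```go
package acceptance_test

import (
	"os"
	"testing"
)

// setupTerraformPath is a hypothetical helper illustrating the new behavior:
// only stub out terraform for local runs. When CLOUD_ENV is set, the variable
// is left untouched so the real terraform binary is used.
func setupTerraformPath(t *testing.T, tempHomeDir string) {
	if os.Getenv("CLOUD_ENV") != "" {
		return
	}
	// Local run against the test server: prevent the CLI from downloading
	// terraform in each test by pointing it at a dummy path.
	t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
}
```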

## Tests
Manually ran the test suite against dogfood: `CLOUD_ENV=aws go test
./acceptance`
---
 acceptance/acceptance_test.go | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 877c7239d..b4b27f201 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -99,9 +99,6 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	repls.SetPath(tempHomeDir, "$TMPHOME")
 	t.Logf("$TMPHOME=%v", tempHomeDir)
 
-	// Prevent CLI from downloading terraform in each test:
-	t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
-
 	// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
 	uvCache := getUVDefaultCacheDir(t)
 	t.Setenv("UV_CACHE_DIR", uvCache)
@@ -119,6 +116,9 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 		homeDir := t.TempDir()
 		// Do not read user's ~/.databrickscfg
 		t.Setenv(env.HomeEnvVar(), homeDir)
+
+		// Prevent CLI from downloading terraform in each test:
+		t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
 	}
 
 	workspaceClient, err := databricks.NewWorkspaceClient()

From 65e4f79dfec84f45689ec3241da62ca3660112e6 Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Tue, 28 Jan 2025 16:24:23 +0530
Subject: [PATCH 32/39] Switch to using `[` from `<` in text replacements
 (#2224)

## Changes
Noticed this when working on
https://github.com/databricks/cli/pull/2221. `<` is a special HTML
character that gets encoded during text replacement when using
`AssertEqualTexts`, which mangles placeholders like `<UUID>`.
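
To illustrate (assuming ordinary HTML entity encoding, for example Go's `html` package), an angle-bracket placeholder does not survive escaping, while a square-bracket one does:

```go
package main

import (
	"fmt"
	"html"
)

func main() {
	// The angle-bracket placeholder is rewritten by HTML escaping...
	fmt.Println(html.EscapeString("uuid: <UUID>")) // uuid: &lt;UUID&gt;
	// ...while the square-bracket placeholder passes through unchanged.
	fmt.Println(html.EscapeString("uuid: [UUID]")) // uuid: [UUID]
}
```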


## Tests
N/A
---
 .../dbt-sql/output/my_dbt_sql/databricks.yml         |  2 +-
 .../output/my_default_python/databricks.yml          |  2 +-
 .../my_default_python/scratch/exploration.ipynb      |  2 +-
 .../output/my_default_python/src/dlt_pipeline.ipynb  |  6 +++---
 .../output/my_default_python/src/notebook.ipynb      |  4 ++--
 .../default-sql/output/my_default_sql/databricks.yml |  2 +-
 .../output/my_default_sql/scratch/exploration.ipynb  |  2 +-
 .../output/my_jobs_as_code/databricks.yml            |  2 +-
 .../output/my_jobs_as_code/src/notebook.ipynb        |  4 ++--
 .../bundle/testdata/default_python/bundle_deploy.txt |  2 +-
 .../testdata/default_python/bundle_summary.txt       | 12 ++++++------
 libs/testdiff/replacement.go                         |  4 ++--
 libs/testdiff/replacement_test.go                    |  4 ++--
 13 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml
index 1962bc543..cdf3704b9 100644
--- a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml
+++ b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml
@@ -3,7 +3,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_dbt_sql
-  uuid: <UUID>
+  uuid: [UUID]
 
 include:
   - resources/*.yml
diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml b/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml
index 9deca9cf5..3fa777219 100644
--- a/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml
+++ b/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml
@@ -2,7 +2,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_default_python
-  uuid: <UUID>
+  uuid: [UUID]
 
 include:
   - resources/*.yml
diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb
index 3b2fef4b4..a12773d4e 100644
--- a/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb
+++ b/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb
@@ -20,7 +20,7 @@
       "rowLimit": 10000
      },
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb
index 36e993af7..8a02183e7 100644
--- a/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb
+++ b/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb
@@ -6,7 +6,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
@@ -24,7 +24,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
@@ -47,7 +47,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb
index 0d560443b..472ccb219 100644
--- a/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb
+++ b/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb
@@ -6,7 +6,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
@@ -37,7 +37,7 @@
       "rowLimit": 10000
      },
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
diff --git a/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml b/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml
index ab857287e..16292bc84 100644
--- a/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml
+++ b/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml
@@ -2,7 +2,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_default_sql
-  uuid: <UUID>
+  uuid: [UUID]
 
 include:
   - resources/*.yml
diff --git a/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb b/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb
index c3fd072e5..f3976c1de 100644
--- a/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb
+++ b/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb
@@ -7,7 +7,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
index a1a93d95c..54e69a256 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml
@@ -2,7 +2,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_jobs_as_code
-  uuid: <UUID>
+  uuid: [UUID]
   databricks_cli_version: ">= 0.238.0"
 
 experimental:
diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb
index 9bc3f1560..227c7cc55 100644
--- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb
+++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb
@@ -6,7 +6,7 @@
     "application/vnd.databricks.v1+cell": {
      "cellMetadata": {},
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
@@ -37,7 +37,7 @@
       "rowLimit": 10000
      },
      "inputWidgets": {},
-     "nuid": "<UUID>",
+     "nuid": "[UUID]",
      "showTitle": false,
      "title": ""
     }
diff --git a/integration/bundle/testdata/default_python/bundle_deploy.txt b/integration/bundle/testdata/default_python/bundle_deploy.txt
index eef0b79b3..d7b8cede9 100644
--- a/integration/bundle/testdata/default_python/bundle_deploy.txt
+++ b/integration/bundle/testdata/default_python/bundle_deploy.txt
@@ -1,5 +1,5 @@
 Building project_name_$UNIQUE_PRJ...
-Uploading project_name_$UNIQUE_PRJ-0.0.1+<NUMID>.<NUMID>-py3-none-any.whl...
+Uploading project_name_$UNIQUE_PRJ-0.0.1+[NUMID].[NUMID]-py3-none-any.whl...
 Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files...
 Deploying resources...
 Updating deployment state...
diff --git a/integration/bundle/testdata/default_python/bundle_summary.txt b/integration/bundle/testdata/default_python/bundle_summary.txt
index 318cd2543..88ccdc496 100644
--- a/integration/bundle/testdata/default_python/bundle_summary.txt
+++ b/integration/bundle/testdata/default_python/bundle_summary.txt
@@ -16,7 +16,7 @@
         "enabled": false
       }
     },
-    "uuid": "<UUID>"
+    "uuid": "[UUID]"
   },
   "include": [
     "resources/project_name_$UNIQUE_PRJ.job.yml",
@@ -74,7 +74,7 @@
           ]
         },
         "format": "MULTI_TASK",
-        "id": "<NUMID>",
+        "id": "[NUMID]",
         "job_clusters": [
           {
             "job_cluster_key": "job_cluster",
@@ -141,7 +141,7 @@
             "unit": "DAYS"
           }
         },
-        "url": "$DATABRICKS_URL/jobs/<NUMID>?o=<NUMID>"
+        "url": "$DATABRICKS_URL/jobs/[NUMID]?o=[NUMID]"
       }
     },
     "pipelines": {
@@ -155,7 +155,7 @@
           "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
         },
         "development": true,
-        "id": "<UUID>",
+        "id": "[UUID]",
         "libraries": [
           {
             "notebook": {
@@ -165,7 +165,7 @@
         ],
         "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline",
         "target": "project_name_$UNIQUE_PRJ_dev",
-        "url": "$DATABRICKS_URL/pipelines/<UUID>?o=<NUMID>"
+        "url": "$DATABRICKS_URL/pipelines/[UUID]?o=[NUMID]"
       }
     }
   },
@@ -183,4 +183,4 @@
       "dev": "$USERNAME"
     }
   }
-}
\ No newline at end of file
+}
diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go
index 40e7e72b4..ce5476a57 100644
--- a/libs/testdiff/replacement.go
+++ b/libs/testdiff/replacement.go
@@ -201,12 +201,12 @@ func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.
 
 func PrepareReplacementsUUID(t testutil.TestingT, r *ReplacementsContext) {
 	t.Helper()
-	r.append(uuidRegex, "<UUID>")
+	r.append(uuidRegex, "[UUID]")
 }
 
 func PrepareReplacementsNumber(t testutil.TestingT, r *ReplacementsContext) {
 	t.Helper()
-	r.append(numIdRegex, "<NUMID>")
+	r.append(numIdRegex, "[NUMID]")
 }
 
 func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsContext) {
diff --git a/libs/testdiff/replacement_test.go b/libs/testdiff/replacement_test.go
index de247c03e..1b6c5fe2d 100644
--- a/libs/testdiff/replacement_test.go
+++ b/libs/testdiff/replacement_test.go
@@ -25,7 +25,7 @@ func TestReplacement_UUID(t *testing.T) {
 
 	PrepareReplacementsUUID(t, &repls)
 
-	assert.Equal(t, "<UUID>", repls.Replace("123e4567-e89b-12d3-a456-426614174000"))
+	assert.Equal(t, "[UUID]", repls.Replace("123e4567-e89b-12d3-a456-426614174000"))
 }
 
 func TestReplacement_Number(t *testing.T) {
@@ -34,7 +34,7 @@ func TestReplacement_Number(t *testing.T) {
 	PrepareReplacementsNumber(t, &repls)
 
 	assert.Equal(t, "12", repls.Replace("12"))
-	assert.Equal(t, "<NUMID>", repls.Replace("123"))
+	assert.Equal(t, "[NUMID]", repls.Replace("123"))
 }
 
 func TestReplacement_TemporaryDirectory(t *testing.T) {

From 5971bd5c1ac0997a88f56dd4ccc88acf501e5267 Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 15:00:41 +0100
Subject: [PATCH 33/39] acc: Disable git hooks (#2249)

Otherwise, hooks from universe and custom hooks run during tests.
---
 acceptance/script.prepare | 1 +
 1 file changed, 1 insertion(+)

diff --git a/acceptance/script.prepare b/acceptance/script.prepare
index b814a1260..ca47cdbff 100644
--- a/acceptance/script.prepare
+++ b/acceptance/script.prepare
@@ -39,6 +39,7 @@ git-repo-init() {
     git config core.autocrlf false
     git config user.name "Tester"
     git config user.email "tester@databricks.com"
+    git config core.hooksPath no-hooks
     git add databricks.yml
     git commit -qm 'Add databricks.yml'
 }

From 025622540809702994aaefdb1e387a6552c00afa Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 15:12:47 +0100
Subject: [PATCH 34/39] acc: Exclude secrets from replacements (#2250)

They should never be printed by the CLI anyway.
---
 libs/testdiff/replacement.go | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go
index ce5476a57..7077e611b 100644
--- a/libs/testdiff/replacement.go
+++ b/libs/testdiff/replacement.go
@@ -146,25 +146,16 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
 	r.Set(w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID")
 	r.Set(w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID")
 	r.Set(w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID")
-	r.Set(w.Config.MetadataServiceURL, "$DATABRICKS_METADATA_SERVICE_URL")
 	r.Set(w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID")
-	r.Set(w.Config.Token, "$DATABRICKS_TOKEN")
 	r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
-	r.Set(w.Config.Password, "$DATABRICKS_PASSWORD")
 	r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
 	r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
 	r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
-	r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS")
 	r.Set(w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID")
-	r.Set(w.Config.AzureClientSecret, "$ARM_CLIENT_SECRET")
-	// r.Set(w.Config.AzureClientID, "$ARM_CLIENT_ID")
 	r.Set(w.Config.AzureClientID, testerName)
 	r.Set(w.Config.AzureTenantID, "$ARM_TENANT_ID")
-	r.Set(w.Config.ActionsIDTokenRequestURL, "$ACTIONS_ID_TOKEN_REQUEST_URL")
-	r.Set(w.Config.ActionsIDTokenRequestToken, "$ACTIONS_ID_TOKEN_REQUEST_TOKEN")
 	r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
 	r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
-	r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET")
 	r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
 	// This is set to words like "path" that happen too frequently
 	// r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")

From 4ba222ab3632c45e488e88d3c54b6e05cbfe441b Mon Sep 17 00:00:00 2001
From: Denis Bilenko <denis.bilenko@databricks.com>
Date: Tue, 28 Jan 2025 15:22:56 +0100
Subject: [PATCH 35/39] Fix env_overrides not to use variables in
 workspace.profile (#2251)

This does not work when this test is run against a cloud workspace.

Needed for https://github.com/databricks/cli/pull/2242
---
 acceptance/bundle/variables/env_overrides/databricks.yml | 7 ++++---
 acceptance/bundle/variables/env_overrides/output.txt     | 3 ++-
 acceptance/bundle/variables/env_overrides/script         | 6 +++---
 3 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/acceptance/bundle/variables/env_overrides/databricks.yml b/acceptance/bundle/variables/env_overrides/databricks.yml
index 560513bc3..e5fc7fcc4 100644
--- a/acceptance/bundle/variables/env_overrides/databricks.yml
+++ b/acceptance/bundle/variables/env_overrides/databricks.yml
@@ -18,12 +18,13 @@ variables:
     description: variable with lookup
     lookup:
       cluster_policy: wrong-cluster-policy
+
+  result:
+    default: ${var.a} ${var.b}
+
 bundle:
   name: test bundle
 
-workspace:
-  profile: ${var.a} ${var.b}
-
 targets:
   env-with-single-variable-override:
     variables:
diff --git a/acceptance/bundle/variables/env_overrides/output.txt b/acceptance/bundle/variables/env_overrides/output.txt
index 1ee9ef625..06e6e518b 100644
--- a/acceptance/bundle/variables/env_overrides/output.txt
+++ b/acceptance/bundle/variables/env_overrides/output.txt
@@ -36,5 +36,6 @@ Exit code: 1
   "b": "prod-b",
   "d": "4321",
   "e": "1234",
-  "f": "9876"
+  "f": "9876",
+  "result": "default-a prod-b"
 }
diff --git a/acceptance/bundle/variables/env_overrides/script b/acceptance/bundle/variables/env_overrides/script
index 30919fd8a..3965d1564 100644
--- a/acceptance/bundle/variables/env_overrides/script
+++ b/acceptance/bundle/variables/env_overrides/script
@@ -1,6 +1,6 @@
-trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .workspace.profile
-trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
-trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
+trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .variables.result.value
+trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value
+trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value
 trace errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
 trace errcode $CLI bundle validate -t env-using-an-undefined-variable
 trace $CLI bundle validate -t env-overrides-lookup -o json | jq '.variables | map_values(.value)'

From 099e9bed0f2250e3dcece80e6e64d8873c75e74d Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Tue, 28 Jan 2025 15:34:44 +0100
Subject: [PATCH 36/39] Upgrade TF provider to 1.64.1 (#2247)

## Changes
- Added support for `no_compute` in Apps
- Added support for `run_as_repl` for job tasks
---
 bundle/internal/tf/codegen/schema/version.go              | 2 +-
 .../internal/tf/schema/data_source_serving_endpoints.go   | 8 ++++----
 bundle/internal/tf/schema/resource_app.go                 | 1 +
 bundle/internal/tf/schema/resource_job.go                 | 2 ++
 bundle/internal/tf/schema/resource_model_serving.go       | 8 ++++----
 bundle/internal/tf/schema/resource_recipient.go           | 1 +
 bundle/internal/tf/schema/root.go                         | 2 +-
 7 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go
index 677b8fc10..393afd6ed 100644
--- a/bundle/internal/tf/codegen/schema/version.go
+++ b/bundle/internal/tf/codegen/schema/version.go
@@ -1,3 +1,3 @@
 package schema
 
-const ProviderVersion = "1.63.0"
+const ProviderVersion = "1.64.1"
diff --git a/bundle/internal/tf/schema/data_source_serving_endpoints.go b/bundle/internal/tf/schema/data_source_serving_endpoints.go
index bdfd778e0..973989216 100644
--- a/bundle/internal/tf/schema/data_source_serving_endpoints.go
+++ b/bundle/internal/tf/schema/data_source_serving_endpoints.go
@@ -3,7 +3,7 @@
 package schema
 
 type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii struct {
-	Behavior string `json:"behavior"`
+	Behavior string `json:"behavior,omitempty"`
 }
 
 type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct {
@@ -14,7 +14,7 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct {
 }
 
 type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct {
-	Behavior string `json:"behavior"`
+	Behavior string `json:"behavior,omitempty"`
 }
 
 type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput struct {
@@ -87,8 +87,8 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabri
 type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig struct {
 	PrivateKey          string `json:"private_key,omitempty"`
 	PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"`
-	ProjectId           string `json:"project_id,omitempty"`
-	Region              string `json:"region,omitempty"`
+	ProjectId           string `json:"project_id"`
+	Region              string `json:"region"`
 }
 
 type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig struct {
diff --git a/bundle/internal/tf/schema/resource_app.go b/bundle/internal/tf/schema/resource_app.go
index 14c93b793..cbce5ab0e 100644
--- a/bundle/internal/tf/schema/resource_app.go
+++ b/bundle/internal/tf/schema/resource_app.go
@@ -91,6 +91,7 @@ type ResourceApp struct {
 	DefaultSourceCodePath    string                        `json:"default_source_code_path,omitempty"`
 	Description              string                        `json:"description,omitempty"`
 	Name                     string                        `json:"name"`
+	NoCompute                bool                          `json:"no_compute,omitempty"`
 	PendingDeployment        *ResourceAppPendingDeployment `json:"pending_deployment,omitempty"`
 	Resources                []ResourceAppResources        `json:"resources,omitempty"`
 	ServicePrincipalClientId string                        `json:"service_principal_client_id,omitempty"`
diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go
index 63c8aeb7b..da277b5c1 100644
--- a/bundle/internal/tf/schema/resource_job.go
+++ b/bundle/internal/tf/schema/resource_job.go
@@ -904,6 +904,7 @@ type ResourceJobTaskForEachTaskTaskSparkJarTask struct {
 	JarUri        string   `json:"jar_uri,omitempty"`
 	MainClassName string   `json:"main_class_name,omitempty"`
 	Parameters    []string `json:"parameters,omitempty"`
+	RunAsRepl     bool     `json:"run_as_repl,omitempty"`
 }
 
 type ResourceJobTaskForEachTaskTaskSparkPythonTask struct {
@@ -1299,6 +1300,7 @@ type ResourceJobTaskSparkJarTask struct {
 	JarUri        string   `json:"jar_uri,omitempty"`
 	MainClassName string   `json:"main_class_name,omitempty"`
 	Parameters    []string `json:"parameters,omitempty"`
+	RunAsRepl     bool     `json:"run_as_repl,omitempty"`
 }
 
 type ResourceJobTaskSparkPythonTask struct {
diff --git a/bundle/internal/tf/schema/resource_model_serving.go b/bundle/internal/tf/schema/resource_model_serving.go
index 71cf8925d..2025de34c 100644
--- a/bundle/internal/tf/schema/resource_model_serving.go
+++ b/bundle/internal/tf/schema/resource_model_serving.go
@@ -3,7 +3,7 @@
 package schema
 
 type ResourceModelServingAiGatewayGuardrailsInputPii struct {
-	Behavior string `json:"behavior"`
+	Behavior string `json:"behavior,omitempty"`
 }
 
 type ResourceModelServingAiGatewayGuardrailsInput struct {
@@ -14,7 +14,7 @@ type ResourceModelServingAiGatewayGuardrailsInput struct {
 }
 
 type ResourceModelServingAiGatewayGuardrailsOutputPii struct {
-	Behavior string `json:"behavior"`
+	Behavior string `json:"behavior,omitempty"`
 }
 
 type ResourceModelServingAiGatewayGuardrailsOutput struct {
@@ -94,8 +94,8 @@ type ResourceModelServingConfigServedEntitiesExternalModelDatabricksModelServing
 type ResourceModelServingConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig struct {
 	PrivateKey          string `json:"private_key,omitempty"`
 	PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"`
-	ProjectId           string `json:"project_id,omitempty"`
-	Region              string `json:"region,omitempty"`
+	ProjectId           string `json:"project_id"`
+	Region              string `json:"region"`
 }
 
 type ResourceModelServingConfigServedEntitiesExternalModelOpenaiConfig struct {
diff --git a/bundle/internal/tf/schema/resource_recipient.go b/bundle/internal/tf/schema/resource_recipient.go
index 91de4df76..4c8f2c7e7 100644
--- a/bundle/internal/tf/schema/resource_recipient.go
+++ b/bundle/internal/tf/schema/resource_recipient.go
@@ -29,6 +29,7 @@ type ResourceRecipient struct {
 	CreatedAt                      int                                 `json:"created_at,omitempty"`
 	CreatedBy                      string                              `json:"created_by,omitempty"`
 	DataRecipientGlobalMetastoreId string                              `json:"data_recipient_global_metastore_id,omitempty"`
+	ExpirationTime                 int                                 `json:"expiration_time,omitempty"`
 	Id                             string                              `json:"id,omitempty"`
 	MetastoreId                    string                              `json:"metastore_id,omitempty"`
 	Name                           string                              `json:"name"`
diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go
index 7dd3f9210..2ac852355 100644
--- a/bundle/internal/tf/schema/root.go
+++ b/bundle/internal/tf/schema/root.go
@@ -21,7 +21,7 @@ type Root struct {
 
 const ProviderHost = "registry.terraform.io"
 const ProviderSource = "databricks/databricks"
-const ProviderVersion = "1.63.0"
+const ProviderVersion = "1.64.1"
 
 func NewRoot() *Root {
 	return &Root{

From 413ca5c13471d007edc607a815850f0b31dc32cb Mon Sep 17 00:00:00 2001
From: Andrew Nester <andrew.nester@databricks.com>
Date: Tue, 28 Jan 2025 18:17:37 +0100
Subject: [PATCH 37/39] Do not wait for app compute to start on `bundle deploy`
 (#2144)

## Changes
This allows DABs to avoid waiting for the compute to start when the app is
initially created as part of `bundle deploy`, which significantly improves
deploy time.

We now always set `no_compute` to true for apps.
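
As a rough illustration of the effect (using a local stand-in for the generated Terraform schema struct, not the CLI's actual converter):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// appResource is a local stand-in for the generated Terraform schema struct;
// the real field was added in bundle/internal/tf/schema/resource_app.go.
type appResource struct {
	Name      string `json:"name"`
	NoCompute bool   `json:"no_compute,omitempty"`
}

func main() {
	// The converter now always emits no_compute=true, so the app is created
	// without waiting for its compute to start.
	out, _ := json.MarshalIndent(appResource{Name: "my_app", NoCompute: true}, "", "  ")
	fmt.Println(string(out))
}
```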

## Tests
Covered by `TestDeployBundleWithApp`; it currently fails until the TF provider
is upgraded to a version that supports the `no_compute` option.
---
 bundle/apps/slow_deploy_message.go            | 29 -------------------
 bundle/deploy/terraform/tfdyn/convert_app.go  |  6 ++++
 .../terraform/tfdyn/convert_app_test.go       |  2 ++
 bundle/phases/deploy.go                       |  1 -
 integration/bundle/apps_test.go               | 11 ++++---
 .../bundle/testdata/apps/bundle_deploy.txt    |  1 -
 6 files changed, 13 insertions(+), 37 deletions(-)
 delete mode 100644 bundle/apps/slow_deploy_message.go

diff --git a/bundle/apps/slow_deploy_message.go b/bundle/apps/slow_deploy_message.go
deleted file mode 100644
index 87275980a..000000000
--- a/bundle/apps/slow_deploy_message.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package apps
-
-import (
-	"context"
-
-	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/libs/cmdio"
-	"github.com/databricks/cli/libs/diag"
-)
-
-type slowDeployMessage struct{}
-
-// TODO: needs to be removed when when no_compute option becomes available in TF provider and used in DABs
-// See https://github.com/databricks/cli/pull/2144
-func (v *slowDeployMessage) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-	if len(b.Config.Resources.Apps) > 0 {
-		cmdio.LogString(ctx, "Note: Databricks apps included in this bundle may increase initial deployment time due to compute provisioning.")
-	}
-
-	return nil
-}
-
-func (v *slowDeployMessage) Name() string {
-	return "apps.SlowDeployMessage"
-}
-
-func SlowDeployMessage() bundle.Mutator {
-	return &slowDeployMessage{}
-}
diff --git a/bundle/deploy/terraform/tfdyn/convert_app.go b/bundle/deploy/terraform/tfdyn/convert_app.go
index dcba0809b..b3d599f15 100644
--- a/bundle/deploy/terraform/tfdyn/convert_app.go
+++ b/bundle/deploy/terraform/tfdyn/convert_app.go
@@ -38,6 +38,12 @@ func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out
 		return err
 	}
 
+	// We always set no_compute to true as it allows DABs not to wait for app compute to be started when app is created.
+	vout, err = dyn.Set(vout, "no_compute", dyn.V(true))
+	if err != nil {
+		return err
+	}
+
 	// Add the converted resource to the output.
 	out.App[key] = vout.AsAny()
 
diff --git a/bundle/deploy/terraform/tfdyn/convert_app_test.go b/bundle/deploy/terraform/tfdyn/convert_app_test.go
index be8152cc6..cdf56f8ed 100644
--- a/bundle/deploy/terraform/tfdyn/convert_app_test.go
+++ b/bundle/deploy/terraform/tfdyn/convert_app_test.go
@@ -63,6 +63,7 @@ func TestConvertApp(t *testing.T) {
 	assert.Equal(t, map[string]any{
 		"description": "app description",
 		"name":        "app_id",
+		"no_compute":  true,
 		"resources": []any{
 			map[string]any{
 				"name": "job1",
@@ -136,6 +137,7 @@ func TestConvertAppWithNoDescription(t *testing.T) {
 	assert.Equal(t, map[string]any{
 		"name":        "app_id",
 		"description": "", // Due to Apps API always returning a description field, we set it in the output as well to avoid permanent TF drift
+		"no_compute":  true,
 		"resources": []any{
 			map[string]any{
 				"name": "job1",
diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go
index b59ce9f89..c6ec04962 100644
--- a/bundle/phases/deploy.go
+++ b/bundle/phases/deploy.go
@@ -130,7 +130,6 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
 	// mutators need informed consent if they are potentially destructive.
 	deployCore := bundle.Defer(
 		bundle.Seq(
-			apps.SlowDeployMessage(),
 			bundle.LogString("Deploying resources..."),
 			terraform.Apply(),
 		),
diff --git a/integration/bundle/apps_test.go b/integration/bundle/apps_test.go
index 23cd784be..01ab52e90 100644
--- a/integration/bundle/apps_test.go
+++ b/integration/bundle/apps_test.go
@@ -18,12 +18,6 @@ import (
 func TestDeployBundleWithApp(t *testing.T) {
 	ctx, wt := acc.WorkspaceTest(t)
 
-	// TODO: should only skip app run when app can be created with no_compute option.
-	if testing.Short() {
-		t.Log("Skip the app creation and run in short mode")
-		return
-	}
-
 	if testutil.GetCloud(t) == testutil.GCP {
 		t.Skip("Skipping test for GCP cloud because /api/2.0/apps is temporarily unavailable there.")
 	}
@@ -106,6 +100,11 @@ env:
   - name: JOB_ID
     value: "%d"`, job.JobId))
 
+	if testing.Short() {
+		t.Log("Skip the app run in short mode")
+		return
+	}
+
 	// Try to run the app
 	_, out := runResourceWithStderr(t, ctx, root, "test_app")
 	require.Contains(t, out, app.Url)
diff --git a/integration/bundle/testdata/apps/bundle_deploy.txt b/integration/bundle/testdata/apps/bundle_deploy.txt
index b077f327d..211164174 100644
--- a/integration/bundle/testdata/apps/bundle_deploy.txt
+++ b/integration/bundle/testdata/apps/bundle_deploy.txt
@@ -1,5 +1,4 @@
 Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ/files...
-Note: Databricks apps included in this bundle may increase initial deployment time due to compute provisioning.
 Deploying resources...
 Updating deployment state...
 Deployment complete!

From 124515e8d2105a3d2ec071dadcb30bf792ba9cad Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Wed, 29 Jan 2025 16:12:21 +0530
Subject: [PATCH 38/39] Move TestServer from acceptance to libs/testserver
 (#2255)

## Changes
Just a move, no changes. As recommended here:
https://github.com/databricks/cli/pull/2226#discussion_r1932152627

## Tests
N/A
---
 acceptance/acceptance_test.go |  3 +-
 acceptance/cmd_server_test.go |  3 +-
 acceptance/server_test.go     | 56 +++----------------------------
 libs/testserver/server.go     | 63 +++++++++++++++++++++++++++++++++++
 4 files changed, 71 insertions(+), 54 deletions(-)
 create mode 100644 libs/testserver/server.go

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index b4b27f201..91ad09e9e 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -20,6 +20,7 @@ import (
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/cli/libs/testdiff"
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/stretchr/testify/require"
 )
@@ -107,7 +108,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	cloudEnv := os.Getenv("CLOUD_ENV")
 
 	if cloudEnv == "" {
-		server := StartServer(t)
+		server := testserver.New(t)
 		AddHandlers(server)
 		// Redirect API access to local server:
 		t.Setenv("DATABRICKS_HOST", server.URL)
diff --git a/acceptance/cmd_server_test.go b/acceptance/cmd_server_test.go
index 28feec1bd..3f5a6356e 100644
--- a/acceptance/cmd_server_test.go
+++ b/acceptance/cmd_server_test.go
@@ -8,10 +8,11 @@ import (
 	"testing"
 
 	"github.com/databricks/cli/internal/testcli"
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/stretchr/testify/require"
 )
 
-func StartCmdServer(t *testing.T) *TestServer {
+func StartCmdServer(t *testing.T) *testserver.Server {
 	server := StartServer(t)
 	server.Handle("/", func(r *http.Request) (any, error) {
 		q := r.URL.Query()
diff --git a/acceptance/server_test.go b/acceptance/server_test.go
index dbc55c03f..66de5dcbf 100644
--- a/acceptance/server_test.go
+++ b/acceptance/server_test.go
@@ -1,73 +1,25 @@
 package acceptance_test
 
 import (
-	"encoding/json"
 	"net/http"
-	"net/http/httptest"
 	"testing"
 
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/iam"
 	"github.com/databricks/databricks-sdk-go/service/workspace"
 )
 
-type TestServer struct {
-	*httptest.Server
-	Mux *http.ServeMux
-}
-
-type HandlerFunc func(r *http.Request) (any, error)
-
-func NewTestServer() *TestServer {
-	mux := http.NewServeMux()
-	server := httptest.NewServer(mux)
-
-	return &TestServer{
-		Server: server,
-		Mux:    mux,
-	}
-}
-
-func (s *TestServer) Handle(pattern string, handler HandlerFunc) {
-	s.Mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
-		resp, err := handler(r)
-		if err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-			return
-		}
-
-		w.Header().Set("Content-Type", "application/json")
-
-		var respBytes []byte
-
-		respString, ok := resp.(string)
-		if ok {
-			respBytes = []byte(respString)
-		} else {
-			respBytes, err = json.MarshalIndent(resp, "", "    ")
-			if err != nil {
-				http.Error(w, err.Error(), http.StatusInternalServerError)
-				return
-			}
-		}
-
-		if _, err := w.Write(respBytes); err != nil {
-			http.Error(w, err.Error(), http.StatusInternalServerError)
-			return
-		}
-	})
-}
-
-func StartServer(t *testing.T) *TestServer {
-	server := NewTestServer()
+func StartServer(t *testing.T) *testserver.Server {
+	server := testserver.New(t)
 	t.Cleanup(func() {
 		server.Close()
 	})
 	return server
 }
 
-func AddHandlers(server *TestServer) {
+func AddHandlers(server *testserver.Server) {
 	server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
 		return compute.ListPoliciesResponse{
 			Policies: []compute.Policy{
diff --git a/libs/testserver/server.go b/libs/testserver/server.go
new file mode 100644
index 000000000..10269af8f
--- /dev/null
+++ b/libs/testserver/server.go
@@ -0,0 +1,63 @@
+package testserver
+
+import (
+	"encoding/json"
+	"net/http"
+	"net/http/httptest"
+
+	"github.com/databricks/cli/internal/testutil"
+)
+
+type Server struct {
+	*httptest.Server
+	Mux *http.ServeMux
+
+	t testutil.TestingT
+}
+
+func New(t testutil.TestingT) *Server {
+	mux := http.NewServeMux()
+	server := httptest.NewServer(mux)
+
+	return &Server{
+		Server: server,
+		Mux:    mux,
+		t:      t,
+	}
+}
+
+type HandlerFunc func(req *http.Request) (resp any, err error)
+
+func (s *Server) Close() {
+	s.Server.Close()
+}
+
+func (s *Server) Handle(pattern string, handler HandlerFunc) {
+	s.Mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
+		resp, err := handler(r)
+		if err != nil {
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+			return
+		}
+
+		w.Header().Set("Content-Type", "application/json")
+
+		var respBytes []byte
+
+		respString, ok := resp.(string)
+		if ok {
+			respBytes = []byte(respString)
+		} else {
+			respBytes, err = json.MarshalIndent(resp, "", "    ")
+			if err != nil {
+				http.Error(w, err.Error(), http.StatusInternalServerError)
+				return
+			}
+		}
+
+		if _, err := w.Write(respBytes); err != nil {
+			http.Error(w, err.Error(), http.StatusInternalServerError)
+			return
+		}
+	})
+}

From 884b5f26ed148c431a0dfea6333bff9b293f8ed1 Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Wed, 29 Jan 2025 16:32:08 +0530
Subject: [PATCH 39/39] Set bundle auth configuration in command context
 (#2195)

## Changes

This change is required to enable tracking execution time telemetry for
bundle commands. In order to track execution time for the command
generally, we need to have the databricks auth configuration available
at this section of the code:


https://github.com/databricks/cli/blob/41bbd89257285707b3c3df9b9e5b92d6bcf8f1d1/cmd/root/root.go#L99

To do this, we rely on the `configUsed` context key.

Most commands rely on the `root.MustWorkspaceClient` function which
automatically sets the client config in the `configUsed` context key.
Bundle commands, however, do not do so. They instead store their
workspace clients in the `&bundle.Bundle{}` object.

With this PR, the `configUsed` context key will be set for all `bundle`
commands. Functionally nothing changes.
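
For illustration, a minimal sketch of the context-key pattern this relies on (the key and config type below are simplified stand-ins; the real ones live in `cmd/root` and the SDK config):

```go
package main

import (
	"context"
	"fmt"
)

// configUsed is a private context key; taking its address yields a unique key,
// mirroring the pattern used in cmd/root.
var configUsed int

// Config is a simplified stand-in for the SDK auth configuration.
type Config struct {
	Host    string
	Profile string
}

// setConfigUsed stores the resolved auth configuration in the context so that
// downstream code (for example telemetry) can read it without a workspace client.
func setConfigUsed(ctx context.Context, cfg *Config) context.Context {
	return context.WithValue(ctx, &configUsed, cfg)
}

// ConfigUsed retrieves the configuration stored by setConfigUsed, if any.
func ConfigUsed(ctx context.Context) *Config {
	cfg, _ := ctx.Value(&configUsed).(*Config)
	return cfg
}

func main() {
	ctx := setConfigUsed(context.Background(), &Config{Host: "https://x.com", Profile: "PROFILE-1"})
	fmt.Println(ConfigUsed(ctx).Host, ConfigUsed(ctx).Profile)
}
```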

## Tests
Existing tests. Also manually verified that either
`root.MustConfigureBundle` or `utils.ConfigureBundleWithVariables` is
called for all bundle commands (except `bundle init`), thus ensuring this
context key is set for all bundle commands.

refs for the functions:
1. `root.MustConfigureBundle`:
https://github.com/databricks/cli/blob/41bbd89257285707b3c3df9b9e5b92d6bcf8f1d1/cmd/root/bundle.go#L88
2. `utils.ConfigureBundleWithVariables`:
https://github.com/databricks/cli/blob/41bbd89257285707b3c3df9b9e5b92d6bcf8f1d1/cmd/bundle/utils/utils.go#L19

---------

Co-authored-by: Pieter Noordhuis <pieter.noordhuis@databricks.com>
---
 bundle/bundle.go                              | 31 ++++---
 .../mutator/initialize_workspace_client.go    | 26 ------
 bundle/phases/initialize.go                   |  1 -
 cmd/root/auth.go                              |  2 +-
 cmd/root/bundle.go                            | 16 ++++
 cmd/root/bundle_test.go                       | 85 ++++++++-----------
 6 files changed, 69 insertions(+), 92 deletions(-)
 delete mode 100644 bundle/config/mutator/initialize_workspace_client.go

diff --git a/bundle/bundle.go b/bundle/bundle.go
index e715b8b2c..9cb8916f5 100644
--- a/bundle/bundle.go
+++ b/bundle/bundle.go
@@ -72,6 +72,7 @@ type Bundle struct {
 	// It can be initialized on demand after loading the configuration.
 	clientOnce sync.Once
 	client     *databricks.WorkspaceClient
+	clientErr  error
 
 	// Files that are synced to the workspace.file_path
 	Files []fileset.File
@@ -134,23 +135,25 @@ func TryLoad(ctx context.Context) (*Bundle, error) {
 	return Load(ctx, root)
 }
 
-func (b *Bundle) InitializeWorkspaceClient() (*databricks.WorkspaceClient, error) {
-	client, err := b.Config.Workspace.Client()
-	if err != nil {
-		return nil, fmt.Errorf("cannot resolve bundle auth configuration: %w", err)
-	}
-	return client, nil
+func (b *Bundle) WorkspaceClientE() (*databricks.WorkspaceClient, error) {
+	b.clientOnce.Do(func() {
+		var err error
+		b.client, err = b.Config.Workspace.Client()
+		if err != nil {
+			b.clientErr = fmt.Errorf("cannot resolve bundle auth configuration: %w", err)
+		}
+	})
+
+	return b.client, b.clientErr
 }
 
 func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient {
-	b.clientOnce.Do(func() {
-		var err error
-		b.client, err = b.InitializeWorkspaceClient()
-		if err != nil {
-			panic(err)
-		}
-	})
-	return b.client
+	client, err := b.WorkspaceClientE()
+	if err != nil {
+		panic(err)
+	}
+
+	return client
 }
 
 // SetWorkpaceClient sets the workspace client for this bundle.
diff --git a/bundle/config/mutator/initialize_workspace_client.go b/bundle/config/mutator/initialize_workspace_client.go
deleted file mode 100644
index 5c905f40c..000000000
--- a/bundle/config/mutator/initialize_workspace_client.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package mutator
-
-import (
-	"context"
-
-	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/libs/diag"
-)
-
-type initializeWorkspaceClient struct{}
-
-func InitializeWorkspaceClient() bundle.Mutator {
-	return &initializeWorkspaceClient{}
-}
-
-func (m *initializeWorkspaceClient) Name() string {
-	return "InitializeWorkspaceClient"
-}
-
-// Apply initializes the workspace client for the bundle. We do this here so
-// downstream calls to b.WorkspaceClient() do not panic if there's an error in the
-// auth configuration.
-func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
-	_, err := b.InitializeWorkspaceClient()
-	return diag.FromErr(err)
-}
diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go
index c5b875196..afd6def3f 100644
--- a/bundle/phases/initialize.go
+++ b/bundle/phases/initialize.go
@@ -34,7 +34,6 @@ func Initialize() bundle.Mutator {
 			// If it is an ancestor, this updates all paths to be relative to the sync root path.
 			mutator.SyncInferRoot(),
 
-			mutator.InitializeWorkspaceClient(),
 			mutator.PopulateCurrentUser(),
 			mutator.LoadGitDetails(),
 
diff --git a/cmd/root/auth.go b/cmd/root/auth.go
index 49abfd414..4fcfbb4d8 100644
--- a/cmd/root/auth.go
+++ b/cmd/root/auth.go
@@ -209,7 +209,7 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error {
 		if b != nil {
 			ctx = context.WithValue(ctx, &configUsed, b.Config.Workspace.Config())
 			cmd.SetContext(ctx)
-			client, err := b.InitializeWorkspaceClient()
+			client, err := b.WorkspaceClientE()
 			if err != nil {
 				return err
 			}
diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go
index 8b98f2cf2..5842526f3 100644
--- a/cmd/root/bundle.go
+++ b/cmd/root/bundle.go
@@ -81,6 +81,22 @@ func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag
 
 	// Configure the workspace profile if the flag has been set.
 	diags = diags.Extend(configureProfile(cmd, b))
+	if diags.HasError() {
+		return b, diags
+	}
+
+	// Set the auth configuration in the command context. This can be used
+	// downstream to initialize an API client.
+	//
+	// Note that just initializing a workspace client and loading auth configuration
+	// is a fast operation. It does not perform network I/O or invoke processes (for example the Azure CLI).
+	client, err := b.WorkspaceClientE()
+	if err != nil {
+		return b, diags.Extend(diag.FromErr(err))
+	}
+	ctx = context.WithValue(ctx, &configUsed, client.Config)
+	cmd.SetContext(ctx)
+
 	return b, diags
 }
 
diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go
index 1998b19e6..3517b02e4 100644
--- a/cmd/root/bundle_test.go
+++ b/cmd/root/bundle_test.go
@@ -8,7 +8,6 @@ import (
 	"runtime"
 	"testing"
 
-	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/spf13/cobra"
 	"github.com/stretchr/testify/assert"
@@ -38,7 +37,7 @@ func emptyCommand(t *testing.T) *cobra.Command {
 	return cmd
 }
 
-func setupWithHost(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle {
+func setupWithHost(t *testing.T, cmd *cobra.Command, host string) error {
 	setupDatabricksCfg(t)
 
 	rootPath := t.TempDir()
@@ -51,12 +50,11 @@ workspace:
 	err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0o644)
 	require.NoError(t, err)
 
-	b, diags := MustConfigureBundle(cmd)
-	require.NoError(t, diags.Error())
-	return b
+	_, diags := MustConfigureBundle(cmd)
+	return diags.Error()
 }
 
-func setupWithProfile(t *testing.T, cmd *cobra.Command, profile string) *bundle.Bundle {
+func setupWithProfile(t *testing.T, cmd *cobra.Command, profile string) error {
 	setupDatabricksCfg(t)
 
 	rootPath := t.TempDir()
@@ -69,29 +67,25 @@ workspace:
 	err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0o644)
 	require.NoError(t, err)
 
-	b, diags := MustConfigureBundle(cmd)
-	require.NoError(t, diags.Error())
-	return b
+	_, diags := MustConfigureBundle(cmd)
+	return diags.Error()
 }
 
 func TestBundleConfigureDefault(t *testing.T) {
 	testutil.CleanupEnvironment(t)
 
 	cmd := emptyCommand(t)
-	b := setupWithHost(t, cmd, "https://x.com")
-
-	client, err := b.InitializeWorkspaceClient()
+	err := setupWithHost(t, cmd, "https://x.com")
 	require.NoError(t, err)
-	assert.Equal(t, "https://x.com", client.Config.Host)
+
+	assert.Equal(t, "https://x.com", ConfigUsed(cmd.Context()).Host)
 }
 
 func TestBundleConfigureWithMultipleMatches(t *testing.T) {
 	testutil.CleanupEnvironment(t)
 
 	cmd := emptyCommand(t)
-	b := setupWithHost(t, cmd, "https://a.com")
-
-	_, err := b.InitializeWorkspaceClient()
+	err := setupWithHost(t, cmd, "https://a.com")
 	assert.ErrorContains(t, err, "multiple profiles matched: PROFILE-1, PROFILE-2")
 }
 
@@ -101,9 +95,8 @@ func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("NOEXIST")
 	require.NoError(t, err)
-	b := setupWithHost(t, cmd, "https://x.com")
 
-	_, err = b.InitializeWorkspaceClient()
+	err = setupWithHost(t, cmd, "https://x.com")
 	assert.ErrorContains(t, err, "has no NOEXIST profile configured")
 }
 
@@ -113,9 +106,8 @@ func TestBundleConfigureWithMismatchedProfile(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("PROFILE-1")
 	require.NoError(t, err)
-	b := setupWithHost(t, cmd, "https://x.com")
 
-	_, err = b.InitializeWorkspaceClient()
+	err = setupWithHost(t, cmd, "https://x.com")
 	assert.ErrorContains(t, err, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com")
 }
 
@@ -125,12 +117,11 @@ func TestBundleConfigureWithCorrectProfile(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("PROFILE-1")
 	require.NoError(t, err)
-	b := setupWithHost(t, cmd, "https://a.com")
+	err = setupWithHost(t, cmd, "https://a.com")
 
-	client, err := b.InitializeWorkspaceClient()
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "PROFILE-1", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) {
@@ -138,9 +129,8 @@ func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) {
 
 	t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1")
 	cmd := emptyCommand(t)
-	b := setupWithHost(t, cmd, "https://x.com")
 
-	_, err := b.InitializeWorkspaceClient()
+	err := setupWithHost(t, cmd, "https://x.com")
 	assert.ErrorContains(t, err, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com")
 }
 
@@ -151,12 +141,11 @@ func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("PROFILE-1")
 	require.NoError(t, err)
-	b := setupWithHost(t, cmd, "https://a.com")
 
-	client, err := b.InitializeWorkspaceClient()
+	err = setupWithHost(t, cmd, "https://a.com")
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "PROFILE-1", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestBundleConfigureProfileDefault(t *testing.T) {
@@ -164,13 +153,12 @@ func TestBundleConfigureProfileDefault(t *testing.T) {
 
 	// The profile in the databricks.yml file is used
 	cmd := emptyCommand(t)
-	b := setupWithProfile(t, cmd, "PROFILE-1")
 
-	client, err := b.InitializeWorkspaceClient()
+	err := setupWithProfile(t, cmd, "PROFILE-1")
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "a", client.Config.Token)
-	assert.Equal(t, "PROFILE-1", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "a", ConfigUsed(cmd.Context()).Token)
+	assert.Equal(t, "PROFILE-1", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestBundleConfigureProfileFlag(t *testing.T) {
@@ -180,13 +168,12 @@ func TestBundleConfigureProfileFlag(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("PROFILE-2")
 	require.NoError(t, err)
-	b := setupWithProfile(t, cmd, "PROFILE-1")
 
-	client, err := b.InitializeWorkspaceClient()
+	err = setupWithProfile(t, cmd, "PROFILE-1")
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "b", client.Config.Token)
-	assert.Equal(t, "PROFILE-2", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
+	assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestBundleConfigureProfileEnvVariable(t *testing.T) {
@@ -195,13 +182,12 @@ func TestBundleConfigureProfileEnvVariable(t *testing.T) {
 	// The DATABRICKS_CONFIG_PROFILE environment variable takes precedence over the profile in the databricks.yml file
 	t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-2")
 	cmd := emptyCommand(t)
-	b := setupWithProfile(t, cmd, "PROFILE-1")
 
-	client, err := b.InitializeWorkspaceClient()
+	err := setupWithProfile(t, cmd, "PROFILE-1")
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "b", client.Config.Token)
-	assert.Equal(t, "PROFILE-2", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
+	assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) {
@@ -212,13 +198,12 @@ func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) {
 	cmd := emptyCommand(t)
 	err := cmd.Flag("profile").Value.Set("PROFILE-2")
 	require.NoError(t, err)
-	b := setupWithProfile(t, cmd, "PROFILE-1")
 
-	client, err := b.InitializeWorkspaceClient()
+	err = setupWithProfile(t, cmd, "PROFILE-1")
 	require.NoError(t, err)
-	assert.Equal(t, "https://a.com", client.Config.Host)
-	assert.Equal(t, "b", client.Config.Token)
-	assert.Equal(t, "PROFILE-2", client.Config.Profile)
+	assert.Equal(t, "https://a.com", ConfigUsed(cmd.Context()).Host)
+	assert.Equal(t, "b", ConfigUsed(cmd.Context()).Token)
+	assert.Equal(t, "PROFILE-2", ConfigUsed(cmd.Context()).Profile)
 }
 
 func TestTargetFlagFull(t *testing.T) {