From d784147e994f71ea7b4e30a02daea66e73baea10 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 23 Jan 2025 16:54:55 +0100 Subject: [PATCH 01/25] [Release] Release v0.239.1 (#2218) CLI: * Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)). * Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)). Bundles: * Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)). * Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)). * Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)). * fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)). * Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)). * Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)). * Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)). * Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)). * Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)). * Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)). --- CHANGELOG.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 53392e5db..255bfb0a8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Version changelog +## [Release] Release v0.239.1 + +CLI: + * Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)). + * Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)). + +Bundles: + * Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)). + * Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)). + * Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)). + * fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)). + * Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)). + * Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)). + * Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)). + * Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)). + * Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)). + * Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)). + + + ## [Release] Release v0.239.0 ### New feature announcement From d6d9b994d46bdd11bdc17f215a4138558b626457 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 24 Jan 2025 10:47:12 +0100 Subject: [PATCH 02/25] acc: only print non-zero exit codes in errcode function (#2222) Reduce noise in the output and matches how "Exit code" is handled for the whole script. 
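A minimal sketch of the changed behaviour, assuming a bash shell with `acceptance/script.prepare` sourced:

```bash
errcode true    # success: no "Exit code" line is added anymore
errcode false   # failure: "Exit code: 1" is written to stderr, while errcode itself still returns success to the caller
```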
--- acceptance/bundle/paths/fallback/output.txt | 2 -- acceptance/bundle/paths/nominal/output.txt | 2 -- acceptance/bundle/variables/arg-repeat/output.txt | 2 -- acceptance/script.prepare | 4 +++- 4 files changed, 3 insertions(+), 7 deletions(-) diff --git a/acceptance/bundle/paths/fallback/output.txt b/acceptance/bundle/paths/fallback/output.txt index f694610d2..63121f3d7 100644 --- a/acceptance/bundle/paths/fallback/output.txt +++ b/acceptance/bundle/paths/fallback/output.txt @@ -1,8 +1,6 @@ >>> $CLI bundle validate -t development -o json -Exit code: 0 - >>> $CLI bundle validate -t error Error: notebook this value is overridden not found. Local notebook references are expected to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb] diff --git a/acceptance/bundle/paths/nominal/output.txt b/acceptance/bundle/paths/nominal/output.txt index 189170335..1badcdec6 100644 --- a/acceptance/bundle/paths/nominal/output.txt +++ b/acceptance/bundle/paths/nominal/output.txt @@ -1,8 +1,6 @@ >>> $CLI bundle validate -t development -o json -Exit code: 0 - >>> $CLI bundle validate -t error Error: notebook this value is overridden not found. Local notebook references are expected to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb] diff --git a/acceptance/bundle/variables/arg-repeat/output.txt b/acceptance/bundle/variables/arg-repeat/output.txt index 48bd2033f..2f9de1a3c 100644 --- a/acceptance/bundle/variables/arg-repeat/output.txt +++ b/acceptance/bundle/variables/arg-repeat/output.txt @@ -1,7 +1,5 @@ >>> errcode $CLI bundle validate --var a=one -o json - -Exit code: 0 { "a": { "default": "hello", diff --git a/acceptance/script.prepare b/acceptance/script.prepare index 0567e433a..87910654d 100644 --- a/acceptance/script.prepare +++ b/acceptance/script.prepare @@ -6,7 +6,9 @@ errcode() { local exit_code=$? # Re-enable 'set -e' if it was previously set set -e - >&2 printf "\nExit code: $exit_code\n" + if [ $exit_code -ne 0 ]; then + >&2 printf "\nExit code: $exit_code\n" + fi } trace() { From b4ed23510490bcc16e15990c210598341d4657a6 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 24 Jan 2025 11:18:44 +0100 Subject: [PATCH 03/25] Include EvalSymlinks in SetPath and use SetPath on all paths (#2219) ## Changes When adding path, a few things should take care of: - symlink expansion - forward/backward slashes, so that tests could do sed 's/\\\\/\//g' to make it pass on Windows (see acceptance/bundle/syncroot/dotdot-git/script) SetPath() function takes care of both. This PR uses SetPath() on all paths consistently. ## Tests Existing tests. 
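A minimal illustration of the expansion step that `SetPath` now performs, assuming Go's standard `path/filepath` package; the short-path example is taken from the comment in the diff below:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// On Windows, EvalSymlinks also canonicalizes 8.3 short names,
	// e.g. C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko.
	// On other platforms it resolves symlinks the same way.
	if p, err := filepath.EvalSymlinks(`C:\Users\DENIS~1.BIL`); err == nil {
		fmt.Println(p)
	}
}
```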
--- acceptance/acceptance_test.go | 9 ++------- libs/testdiff/replacement.go | 16 ++++++++++++++-- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 96c1f651c..0e7877dcf 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -93,13 +93,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { } t.Setenv("CLI", execPath) - repls.Set(execPath, "$CLI") + repls.SetPath(execPath, "$CLI") // Make helper scripts available t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH"))) tempHomeDir := t.TempDir() - repls.Set(tempHomeDir, "$TMPHOME") + repls.SetPath(tempHomeDir, "$TMPHOME") t.Logf("$TMPHOME=%v", tempHomeDir) // Prevent CLI from downloading terraform in each test: @@ -187,11 +187,6 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont tmpDir = t.TempDir() } - // Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko - tmpDirEvalled, err1 := filepath.EvalSymlinks(tmpDir) - if err1 == nil && tmpDirEvalled != tmpDir { - repls.SetPathWithParents(tmpDirEvalled, "$TMPDIR") - } repls.SetPathWithParents(tmpDir, "$TMPDIR") scriptContents := readMergedScriptContents(t, dir) diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go index ca76b159c..865192662 100644 --- a/libs/testdiff/replacement.go +++ b/libs/testdiff/replacement.go @@ -94,6 +94,18 @@ func trimQuotes(s string) string { } func (r *ReplacementsContext) SetPath(old, new string) { + if old != "" && old != "." { + // Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko + oldEvalled, err1 := filepath.EvalSymlinks(old) + if err1 == nil && oldEvalled != old { + r.SetPathNoEval(oldEvalled, new) + } + } + + r.SetPathNoEval(old, new) +} + +func (r *ReplacementsContext) SetPathNoEval(old, new string) { r.Set(old, new) if runtime.GOOS != "windows" { @@ -133,7 +145,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont r.Set(w.Config.Token, "$DATABRICKS_TOKEN") r.Set(w.Config.Username, "$DATABRICKS_USERNAME") r.Set(w.Config.Password, "$DATABRICKS_PASSWORD") - r.Set(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE") + r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE") r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE") r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT") r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS") @@ -147,7 +159,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT") r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID") r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET") - r.Set(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH") + r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH") // This is set to words like "path" that happen too frequently // r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE") } From a47a058506d874019887baea1006b587f47cbfdb Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 24 Jan 2025 16:35:00 +0530 Subject: [PATCH 04/25] Limit test server to only accept GET on read endpoints (#2225) ## Changes Now the test server will only match GET queries for these endpoints ## Tests Existing tests. 
--- acceptance/server_test.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/acceptance/server_test.go b/acceptance/server_test.go index 0d10fbea1..eb8cbb24a 100644 --- a/acceptance/server_test.go +++ b/acceptance/server_test.go @@ -68,7 +68,7 @@ func StartServer(t *testing.T) *TestServer { } func AddHandlers(server *TestServer) { - server.Handle("/api/2.0/policies/clusters/list", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) { return compute.ListPoliciesResponse{ Policies: []compute.Policy{ { @@ -83,7 +83,7 @@ func AddHandlers(server *TestServer) { }, nil }) - server.Handle("/api/2.0/instance-pools/list", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) { return compute.ListInstancePools{ InstancePools: []compute.InstancePoolAndStats{ { @@ -94,7 +94,7 @@ func AddHandlers(server *TestServer) { }, nil }) - server.Handle("/api/2.1/clusters/list", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) { return compute.ListClustersResponse{ Clusters: []compute.ClusterDetails{ { @@ -109,13 +109,13 @@ func AddHandlers(server *TestServer) { }, nil }) - server.Handle("/api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) { return iam.User{ UserName: "tester@databricks.com", }, nil }) - server.Handle("/api/2.0/workspace/get-status", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) { return workspace.ObjectInfo{ ObjectId: 1001, ObjectType: "DIRECTORY", @@ -124,13 +124,13 @@ func AddHandlers(server *TestServer) { }, nil }) - server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) { return catalog.MetastoreAssignment{ DefaultCatalogName: "main", }, nil }) - server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) { + server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) { return workspace.WorkspaceObjectPermissions{ ObjectId: "1001", ObjectType: "DIRECTORY", From 959e43e556b2fc775feaf5d519000afdad17a815 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 24 Jan 2025 15:28:23 +0100 Subject: [PATCH 05/25] acc: Support per-test configuration; GOOS option to disable OS (#2227) ## Changes - Acceptance tests load test.toml to configure test behaviour. - If file is not found in the test directory, parents are searched, until the test root. - Currently there is one option: runtime.GOOS to switch off tests per OS. ## Tests Using it in https://github.com/databricks/cli/pull/2223 to disable test on Windows that cannot be run there. 
--- NOTICE | 4 ++ acceptance/acceptance_test.go | 7 +++ acceptance/config_test.go | 99 +++++++++++++++++++++++++++++++++++ acceptance/test.toml | 2 + go.mod | 1 + go.sum | 2 + 6 files changed, 115 insertions(+) create mode 100644 acceptance/config_test.go create mode 100644 acceptance/test.toml diff --git a/NOTICE b/NOTICE index f6b59e0b0..ed22084cf 100644 --- a/NOTICE +++ b/NOTICE @@ -105,3 +105,7 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE https://github.com/hexops/gotextdiff Copyright (c) 2009 The Go Authors. All rights reserved. License - https://github.com/hexops/gotextdiff/blob/main/LICENSE + +https://github.com/BurntSushi/toml +Copyright (c) 2013 TOML authors +https://github.com/BurntSushi/toml/blob/master/COPYING diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 0e7877dcf..a1c41c5e6 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -175,6 +175,13 @@ func getTests(t *testing.T) []string { } func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) { + config, configPath := LoadConfig(t, dir) + + isEnabled, isPresent := config.GOOS[runtime.GOOS] + if isPresent && !isEnabled { + t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath) + } + var tmpDir string var err error if KeepTmp { diff --git a/acceptance/config_test.go b/acceptance/config_test.go new file mode 100644 index 000000000..49dce06ba --- /dev/null +++ b/acceptance/config_test.go @@ -0,0 +1,99 @@ +package acceptance_test + +import ( + "os" + "path/filepath" + "sync" + "testing" + + "github.com/BurntSushi/toml" + "github.com/stretchr/testify/require" +) + +const configFilename = "test.toml" + +var ( + configCache map[string]TestConfig + configMutex sync.Mutex +) + +type TestConfig struct { + // Place to describe what's wrong with this test. Does not affect how the test is run. + Badness string + + // Which OSes the test is enabled on. Each string is compared against runtime.GOOS. + // If absent, default to true. + GOOS map[string]bool +} + +// FindConfig finds the closest config file. +func FindConfig(t *testing.T, dir string) (string, bool) { + shared := false + for { + path := filepath.Join(dir, configFilename) + _, err := os.Stat(path) + + if err == nil { + return path, shared + } + + shared = true + + if dir == "" || dir == "." { + break + } + + if os.IsNotExist(err) { + dir = filepath.Dir(dir) + continue + } + + t.Fatalf("Error while reading %s: %s", path, err) + } + + t.Fatal("Config not found: " + configFilename) + return "", shared +} + +// LoadConfig loads the config file. Non-leaf configs are cached. 
+func LoadConfig(t *testing.T, dir string) (TestConfig, string) { + path, leafConfig := FindConfig(t, dir) + + if leafConfig { + return DoLoadConfig(t, path), path + } + + configMutex.Lock() + defer configMutex.Unlock() + + if configCache == nil { + configCache = make(map[string]TestConfig) + } + + result, ok := configCache[path] + if ok { + return result, path + } + + result = DoLoadConfig(t, path) + configCache[path] = result + return result, path +} + +func DoLoadConfig(t *testing.T, path string) TestConfig { + bytes, err := os.ReadFile(path) + if err != nil { + t.Fatalf("failed to read config: %s", err) + } + + var config TestConfig + meta, err := toml.Decode(string(bytes), &config) + require.NoError(t, err) + + keys := meta.Undecoded() + if len(keys) > 0 { + t.Fatalf("Undecoded keys in %s: %#v", path, keys) + } + + return config +} diff --git a/acceptance/test.toml b/acceptance/test.toml new file mode 100644 index 000000000..eee94d0ea --- /dev/null +++ b/acceptance/test.toml @@ -0,0 +1,2 @@ +# If test directory nor any of its parents do not have test.toml then this file serves as fallback configuration. +# The configurations are not merged across parents; the closest one is used fully. diff --git a/go.mod b/go.mod index 0ef800d7b..930963f89 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.23 toolchain go1.23.4 require ( + github.com/BurntSushi/toml v1.4.0 // MIT github.com/Masterminds/semver/v3 v3.3.1 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0 diff --git a/go.sum b/go.sum index b1364cb26..d025b3947 100644 --- a/go.sum +++ b/go.sum @@ -8,6 +8,8 @@ cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1h dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= +github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= From f65508690d92301e0f6e27ce76a46d28780272ea Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 24 Jan 2025 16:33:54 +0100 Subject: [PATCH 06/25] Update publish-winget action to use Komac directly (#2228) ## Changes For the most recent release, I had to re-run the "publish-winget" action a couple of times before it passed. The underlying issue that causes the failure should be solved by the latest version of the action, but upon inspection of the latest version, I found that it always installs the latest version of [Komac](https://github.com/russellbanks/Komac). To both fix the issue and lock this down further, I updated our action to call Komac directly instead of relying on a separate action to do this for us. ## Tests Successful run in https://github.com/databricks/cli/actions/runs/12951529979. 
--- .github/workflows/publish-winget.yml | 68 +++++++++++++++++++++++----- 1 file changed, 57 insertions(+), 11 deletions(-) diff --git a/.github/workflows/publish-winget.yml b/.github/workflows/publish-winget.yml index eb9a72eda..cbd24856b 100644 --- a/.github/workflows/publish-winget.yml +++ b/.github/workflows/publish-winget.yml @@ -10,19 +10,65 @@ on: jobs: publish-to-winget-pkgs: runs-on: - group: databricks-protected-runner-group - labels: windows-server-latest + group: databricks-deco-testing-runner-group + labels: ubuntu-latest-deco environment: release steps: - - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2 - with: - identifier: Databricks.DatabricksCLI - installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases - token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }} - fork-user: eng-dev-ecosystem-bot + - name: Checkout repository and submodules + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - # Use the tag from the input, or the ref name if the input is not provided. - # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command. - release-tag: ${{ inputs.tag || github.ref_name }} + # When updating the version of komac, make sure to update the checksum in the next step. + # Find both at https://github.com/russellbanks/Komac/releases. + - name: Download komac binary + run: | + curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz + + - name: Verify komac binary + run: | + echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c - + + - name: Untar komac binary to temporary path + run: | + mkdir -p $RUNNER_TEMP/komac + tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac + + - name: Add komac to PATH + run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH + + - name: Confirm komac version + run: komac --version + + # Use the tag from the input, or the ref name if the input is not provided. + # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command. + - name: Strip "v" prefix from version + id: strip_version + run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT" + + - name: Get URLs of signed Windows binaries + id: get_windows_urls + run: | + urls=$( + gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \ + jq -r .assets[].browser_download_url | \ + grep -E '_windows_.*-signed\.zip$' | \ + tr '\n' ' ' + ) + if [ -z "$urls" ]; then + echo "No signed Windows binaries found" >&2 + exit 1 + fi + echo "urls=$urls" >> "$GITHUB_OUTPUT" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Publish to Winget + run: | + komac update Databricks.DatabricksCLI \ + --version ${{ steps.strip_version.outputs.version }} \ + --submit \ + --urls ${{ steps.get_windows_urls.outputs.urls }} \ + env: + KOMAC_FORK_OWNER: eng-dev-ecosystem-bot + GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }} From 468660dc45bd1deac4d37fb914d4a6224aa1a27e Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Fri, 24 Jan 2025 16:53:06 +0100 Subject: [PATCH 07/25] Add an acc test covering failures when reading .git (#2223) ## Changes - New test covering failures in reading .git. 
One case results in error, some result in warning (not shown). - New helper withdir runs commands in a subdirectory. ## Tests New acceptance test. --- .../bundle/git-permerror/databricks.yml | 2 + acceptance/bundle/git-permerror/output.txt | 78 +++++++++++++++++++ acceptance/bundle/git-permerror/script | 25 ++++++ acceptance/bundle/git-permerror/test.toml | 5 ++ acceptance/script.prepare | 11 +++ 5 files changed, 121 insertions(+) create mode 100644 acceptance/bundle/git-permerror/databricks.yml create mode 100644 acceptance/bundle/git-permerror/output.txt create mode 100644 acceptance/bundle/git-permerror/script create mode 100644 acceptance/bundle/git-permerror/test.toml diff --git a/acceptance/bundle/git-permerror/databricks.yml b/acceptance/bundle/git-permerror/databricks.yml new file mode 100644 index 000000000..83e0acda8 --- /dev/null +++ b/acceptance/bundle/git-permerror/databricks.yml @@ -0,0 +1,2 @@ +bundle: + name: git-permerror diff --git a/acceptance/bundle/git-permerror/output.txt b/acceptance/bundle/git-permerror/output.txt new file mode 100644 index 000000000..2b52134ab --- /dev/null +++ b/acceptance/bundle/git-permerror/output.txt @@ -0,0 +1,78 @@ +=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case. + +>>> chmod 000 .git + +>>> $CLI bundle validate +Error: unable to load repository specific gitconfig: open config: permission denied + +Name: git-permerror +Target: default +Workspace: + User: $USERNAME + Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default + +Found 1 error + +Exit code: 1 + +>>> $CLI bundle validate -o json +Error: unable to load repository specific gitconfig: open config: permission denied + + +Exit code: 1 +{ + "bundle_root_path": ".", + "inferred": true +} + +>>> withdir subdir/a/b $CLI bundle validate -o json +Error: unable to load repository specific gitconfig: open config: permission denied + + +Exit code: 1 +{ + "bundle_root_path": ".", + "inferred": true +} + + +=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case. + +>>> chmod 000 .git/HEAD + +>>> $CLI bundle validate -o json +{ + "bundle_root_path": ".", + "inferred": true +} + +>>> withdir subdir/a/b $CLI bundle validate -o json +{ + "bundle_root_path": ".", + "inferred": true +} + + +=== No permissions to read .git/config. Badness: inferred is incorretly set to true. bundle_root_path is not correct is subdir case. + +>>> chmod 000 .git/config + +>>> $CLI bundle validate -o json +Error: unable to load repository specific gitconfig: open config: permission denied + + +Exit code: 1 +{ + "bundle_root_path": ".", + "inferred": true +} + +>>> withdir subdir/a/b $CLI bundle validate -o json +Error: unable to load repository specific gitconfig: open config: permission denied + + +Exit code: 1 +{ + "bundle_root_path": ".", + "inferred": true +} diff --git a/acceptance/bundle/git-permerror/script b/acceptance/bundle/git-permerror/script new file mode 100644 index 000000000..782cbf5bc --- /dev/null +++ b/acceptance/bundle/git-permerror/script @@ -0,0 +1,25 @@ +mkdir myrepo +cd myrepo +cp ../databricks.yml . +git-repo-init +mkdir -p subdir/a/b + +printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. 
bundle_root_path is not correct in subdir case.\n" +trace chmod 000 .git +errcode trace $CLI bundle validate +errcode trace $CLI bundle validate -o json | jq .bundle.git +errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git + +printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n" +chmod 700 .git +trace chmod 000 .git/HEAD +errcode trace $CLI bundle validate -o json | jq .bundle.git +errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git + +printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorretly set to true. bundle_root_path is not correct is subdir case.\n" +chmod 666 .git/HEAD +trace chmod 000 .git/config +errcode trace $CLI bundle validate -o json | jq .bundle.git +errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git + +rm -fr .git diff --git a/acceptance/bundle/git-permerror/test.toml b/acceptance/bundle/git-permerror/test.toml new file mode 100644 index 000000000..3f96e551c --- /dev/null +++ b/acceptance/bundle/git-permerror/test.toml @@ -0,0 +1,5 @@ +Badness = "Warning logs not shown; inferred flag is set to true incorrect; bundle_root_path is not correct" + +[GOOS] +# This test relies on chmod which does not work on Windows +windows = false diff --git a/acceptance/script.prepare b/acceptance/script.prepare index 87910654d..b814a1260 100644 --- a/acceptance/script.prepare +++ b/acceptance/script.prepare @@ -47,3 +47,14 @@ title() { local label="$1" printf "\n=== %s" "$label" } + +withdir() { + local dir="$1" + shift + local orig_dir="$(pwd)" + cd "$dir" || return $? + "$@" + local exit_code=$? + cd "$orig_dir" || return $? + return $exit_code +} From b3d98fe66664cb85c750364afce9b1ea0785417f Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 08:45:09 +0100 Subject: [PATCH 08/25] acc: Print replacements on error and rm duplicates (#2230) ## Changes - File comparison files in acceptance test, print the contents of all applied replacements. Do it once per test. - Remove duplicate entries in replacement list. ## Tests Manually, change out files of existing test, you'll get this printed once, after first assertion: ``` acceptance_test.go:307: Available replacements: REPL /Users/denis\.bilenko/work/cli/acceptance/build/databricks => $$CLI REPL /private/var/folders/5y/9kkdnjw91p11vsqwk0cvmk200000gp/T/TestAccept598522733/001 => $$TMPHOME ... 
``` --- acceptance/acceptance_test.go | 17 ++++++++++++++--- libs/testdiff/replacement.go | 6 +++++- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index a1c41c5e6..11fd3f2ee 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -228,9 +228,11 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont formatOutput(out, err) require.NoError(t, out.Close()) + printedRepls := false + // Compare expected outputs for relPath := range outputs { - doComparison(t, repls, dir, tmpDir, relPath) + doComparison(t, repls, dir, tmpDir, relPath, &printedRepls) } // Make sure there are not unaccounted for new files @@ -245,12 +247,12 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont if strings.HasPrefix(relPath, "out") { // We have a new file starting with "out" // Show the contents & support overwrite mode for it: - doComparison(t, repls, dir, tmpDir, relPath) + doComparison(t, repls, dir, tmpDir, relPath, &printedRepls) } } } -func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string) { +func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) { pathRef := filepath.Join(dirRef, relPath) pathNew := filepath.Join(dirNew, relPath) bufRef, okRef := readIfExists(t, pathRef) @@ -295,6 +297,15 @@ func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirN t.Logf("Overwriting existing output file: %s", relPath) testutil.WriteFile(t, pathRef, valueNew) } + + if !equal && printedRepls != nil && !*printedRepls { + *printedRepls = true + var items []string + for _, item := range repls.Repls { + items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New)) + } + t.Log("Available replacements:\n" + strings.Join(items, "\n")) + } } // Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ... diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go index 865192662..b512374a3 100644 --- a/libs/testdiff/replacement.go +++ b/libs/testdiff/replacement.go @@ -76,7 +76,11 @@ func (r *ReplacementsContext) Set(old, new string) { if err == nil { encodedOld, err := json.Marshal(old) if err == nil { - r.appendLiteral(trimQuotes(string(encodedOld)), trimQuotes(string(encodedNew))) + encodedStrNew := trimQuotes(string(encodedNew)) + encodedStrOld := trimQuotes(string(encodedOld)) + if encodedStrNew != new || encodedStrOld != old { + r.appendLiteral(encodedStrOld, encodedStrNew) + } } } From 82b0dd36d682b1b11260e05e8a5c6aeccb65c255 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 09:17:22 +0100 Subject: [PATCH 09/25] Add acceptance/selftest, showcasing basic features (#2229) Also make TestInprocessMode use this test. 
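A sketch of how to exercise the new selftest locally, assuming the standard Go test runner is invoked from the repository root:

```bash
# TestInprocessMode now runs the acceptance/selftest directory in-process.
go test ./acceptance -run TestInprocessMode -v
```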
--- acceptance/README.md | 2 ++ acceptance/acceptance_test.go | 7 +------ acceptance/selftest/out.hello.txt | 1 + acceptance/selftest/output.txt | 30 ++++++++++++++++++++++++++++++ acceptance/selftest/script | 21 +++++++++++++++++++++ acceptance/selftest/test.toml | 11 +++++++++++ 6 files changed, 66 insertions(+), 6 deletions(-) create mode 100644 acceptance/selftest/out.hello.txt create mode 100644 acceptance/selftest/output.txt create mode 100644 acceptance/selftest/script create mode 100644 acceptance/selftest/test.toml diff --git a/acceptance/README.md b/acceptance/README.md index 42a37d253..75ac1d5fc 100644 --- a/acceptance/README.md +++ b/acceptance/README.md @@ -17,3 +17,5 @@ For more complex tests one can also use: - `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script. - `trace` helper: prints the arguments before executing the command. - custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`. + +See [selftest](./selftest) for a toy test. diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 11fd3f2ee..6b70c6a7f 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -60,12 +60,7 @@ func TestInprocessMode(t *testing.T) { if InprocessMode { t.Skip("Already tested by TestAccept") } - if runtime.GOOS == "windows" { - // - catalogs A catalog is the first layer of Unity Catalog’s three-level namespace. - // + catalogs A catalog is the first layer of Unity Catalog�s three-level namespace. - t.Skip("Fails on CI on unicode characters") - } - require.NotZero(t, testAccept(t, true, "help")) + require.Equal(t, 1, testAccept(t, true, "selftest")) } func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { diff --git a/acceptance/selftest/out.hello.txt b/acceptance/selftest/out.hello.txt new file mode 100644 index 000000000..e427984d4 --- /dev/null +++ b/acceptance/selftest/out.hello.txt @@ -0,0 +1 @@ +HELLO diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt new file mode 100644 index 000000000..d1830e01f --- /dev/null +++ b/acceptance/selftest/output.txt @@ -0,0 +1,30 @@ +=== Capturing STDERR +>>> python3 -c import sys; sys.stderr.write("STDERR\n") +STDERR + +=== Capturing STDOUT +>>> python3 -c import sys; sys.stderr.write("STDOUT\n") +STDOUT + +=== Capturing exit code +>>> errcode python3 -c raise SystemExit(5) + +Exit code: 5 + +=== Capturing exit code (alt) +>>> python3 -c raise SystemExit(7) + +Exit code: 7 + +=== Capturing pwd +>>> python3 -c import os; print(os.getcwd()) +$TMPDIR + +=== Capturing subdir +>>> mkdir -p subdir/a/b/c + +>>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd()) +$TMPDIR/subdir/a/b/c + +=== Custom output files - everything starting with out is captured and compared +>>> echo HELLO diff --git a/acceptance/selftest/script b/acceptance/selftest/script new file mode 100644 index 000000000..89201d925 --- /dev/null +++ b/acceptance/selftest/script @@ -0,0 +1,21 @@ +printf "=== Capturing STDERR" +trace python3 -c 'import sys; sys.stderr.write("STDERR\n")' + +printf "\n=== Capturing STDOUT" +trace python3 -c 'import sys; sys.stderr.write("STDOUT\n")' + +printf "\n=== Capturing exit code" +trace errcode python3 -c 'raise SystemExit(5)' + +printf "\n=== Capturing exit code (alt)" +errcode trace python3 -c 'raise SystemExit(7)' + +printf "\n=== Capturing pwd" +trace python3 
-c 'import os; print(os.getcwd())' + +printf "\n=== Capturing subdir" +trace mkdir -p subdir/a/b/c +trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())' | sed 's/\\/\//g' + +printf "\n=== Custom output files - everything starting with out is captured and compared" +trace echo HELLO > out.hello.txt diff --git a/acceptance/selftest/test.toml b/acceptance/selftest/test.toml new file mode 100644 index 000000000..d867a4fd7 --- /dev/null +++ b/acceptance/selftest/test.toml @@ -0,0 +1,11 @@ +# Badness = "Brief description of what's wrong with the test output, if anything" + +#[GOOS] +# Disable on Windows +#windows = false + +# Disable on Mac +#mac = false + +# Disable on Linux +#linux = false From 1cb32eca907872556b94890e3666ffac531a0f29 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 10:11:06 +0100 Subject: [PATCH 10/25] acc: Support custom replacements (#2231) ## Changes - Ability to extend a list of replacements via test.toml - Modify selftest to both demo this feature and to get rid of sed on Windows. ## Tests Acceptance tests. I'm also using it https://github.com/databricks/cli/pull/2213 for things like pid. --- acceptance/acceptance_test.go | 1 + acceptance/config_test.go | 5 +++++ acceptance/selftest/output.txt | 5 +++++ acceptance/selftest/script | 7 ++++++- acceptance/selftest/test.toml | 9 +++++++++ 5 files changed, 26 insertions(+), 1 deletion(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 6b70c6a7f..e48bd9908 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -190,6 +190,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont } repls.SetPathWithParents(tmpDir, "$TMPDIR") + repls.Repls = append(repls.Repls, config.Repls...) scriptContents := readMergedScriptContents(t, dir) testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents) diff --git a/acceptance/config_test.go b/acceptance/config_test.go index 49dce06ba..41866c4a7 100644 --- a/acceptance/config_test.go +++ b/acceptance/config_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/BurntSushi/toml" + "github.com/databricks/cli/libs/testdiff" "github.com/stretchr/testify/require" ) @@ -24,6 +25,10 @@ type TestConfig struct { // Which OSes the test is enabled on. Each string is compared against runtime.GOOS. // If absent, default to true. GOOS map[string]bool + + // List of additional replacements to apply on this test. + // Old is a regexp, New is a replacement expression. + Repls []testdiff.Replacement } // FindConfig finds the closest config file. 
diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt index d1830e01f..9fdfbc1e7 100644 --- a/acceptance/selftest/output.txt +++ b/acceptance/selftest/output.txt @@ -28,3 +28,8 @@ $TMPDIR/subdir/a/b/c === Custom output files - everything starting with out is captured and compared >>> echo HELLO + +=== Custom regex can be specified in [[Repl]] section +1234 +CUSTOM_NUMBER_REGEX +123456 diff --git a/acceptance/selftest/script b/acceptance/selftest/script index 89201d925..665726167 100644 --- a/acceptance/selftest/script +++ b/acceptance/selftest/script @@ -15,7 +15,12 @@ trace python3 -c 'import os; print(os.getcwd())' printf "\n=== Capturing subdir" trace mkdir -p subdir/a/b/c -trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())' | sed 's/\\/\//g' +trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())' printf "\n=== Custom output files - everything starting with out is captured and compared" trace echo HELLO > out.hello.txt + +printf "\n=== Custom regex can be specified in [[Repl]] section\n" +echo 1234 +echo 12345 +echo 123456 diff --git a/acceptance/selftest/test.toml b/acceptance/selftest/test.toml index d867a4fd7..9607ec5df 100644 --- a/acceptance/selftest/test.toml +++ b/acceptance/selftest/test.toml @@ -9,3 +9,12 @@ # Disable on Linux #linux = false + +[[Repls]] +Old = '\b[0-9]{5}\b' +New = "CUSTOM_NUMBER_REGEX" + +[[Repls]] +# Fix path with reverse slashes in the output for Windows. +Old = '\$TMPDIR\\subdir\\a\\b\\c' +New = '$$TMPDIR/subdir/a/b/c' From 6e8f0ea8afeecf86c3edd42d0ccccbacf25353d2 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 11:33:16 +0100 Subject: [PATCH 11/25] CI: Move ruff to 'lint' job (#2232) This is where it belongs and also there is no need to run it 3 times. --- .github/workflows/push.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index c3a314d69..2a8a68862 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -60,12 +60,6 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0 - - name: Run ruff - uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1 - with: - version: "0.9.1" - args: "format --check" - - name: Set go env run: | echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV @@ -80,7 +74,7 @@ jobs: - name: Run tests with coverage run: make cover - golangci: + linters: needs: cleanups name: lint runs-on: ubuntu-latest @@ -105,6 +99,11 @@ jobs: with: version: v1.63.4 args: --timeout=15m + - name: Run ruff + uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1 + with: + version: "0.9.1" + args: "format --check" validate-bundle-schema: needs: cleanups From b7dd70b8b3c59d64ab7b54805750b532b0d75f07 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 13:22:40 +0100 Subject: [PATCH 12/25] acc: Add a couple of error tests for 'bundle init' (#2233) This captures how we log errors related to subprocess run and what does the output look like. 
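The two new tests reduce to the following invocations, shown here as a sketch with `$CLI` pointing at the built binary:

```bash
export NO_COLOR=1
$CLI bundle init /DOES/NOT/EXIST                                          # local path without a template schema
$CLI bundle init https://invalid-domain-123.databricks.com/hello/world    # clone against an unresolvable host
```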
--- acceptance/bundle/templates/wrong-path/output.txt | 3 +++ acceptance/bundle/templates/wrong-path/script | 2 ++ acceptance/bundle/templates/wrong-path/test.toml | 1 + acceptance/bundle/templates/wrong-url/output.txt | 5 +++++ acceptance/bundle/templates/wrong-url/script | 2 ++ acceptance/bundle/templates/wrong-url/test.toml | 7 +++++++ 6 files changed, 20 insertions(+) create mode 100644 acceptance/bundle/templates/wrong-path/output.txt create mode 100644 acceptance/bundle/templates/wrong-path/script create mode 100644 acceptance/bundle/templates/wrong-path/test.toml create mode 100644 acceptance/bundle/templates/wrong-url/output.txt create mode 100644 acceptance/bundle/templates/wrong-url/script create mode 100644 acceptance/bundle/templates/wrong-url/test.toml diff --git a/acceptance/bundle/templates/wrong-path/output.txt b/acceptance/bundle/templates/wrong-path/output.txt new file mode 100644 index 000000000..0a6fdfc84 --- /dev/null +++ b/acceptance/bundle/templates/wrong-path/output.txt @@ -0,0 +1,3 @@ +Error: not a bundle template: expected to find a template schema file at databricks_template_schema.json + +Exit code: 1 diff --git a/acceptance/bundle/templates/wrong-path/script b/acceptance/bundle/templates/wrong-path/script new file mode 100644 index 000000000..00c05927a --- /dev/null +++ b/acceptance/bundle/templates/wrong-path/script @@ -0,0 +1,2 @@ +export NO_COLOR=1 +$CLI bundle init /DOES/NOT/EXIST diff --git a/acceptance/bundle/templates/wrong-path/test.toml b/acceptance/bundle/templates/wrong-path/test.toml new file mode 100644 index 000000000..4bbcb5100 --- /dev/null +++ b/acceptance/bundle/templates/wrong-path/test.toml @@ -0,0 +1 @@ +Badness = 'The error message should include full path: "expected to find a template schema file at databricks_template_schema.json"' diff --git a/acceptance/bundle/templates/wrong-url/output.txt b/acceptance/bundle/templates/wrong-url/output.txt new file mode 100644 index 000000000..b78cf4b68 --- /dev/null +++ b/acceptance/bundle/templates/wrong-url/output.txt @@ -0,0 +1,5 @@ +Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'... +fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com + + +Exit code: 1 diff --git a/acceptance/bundle/templates/wrong-url/script b/acceptance/bundle/templates/wrong-url/script new file mode 100644 index 000000000..e9bc0f4f6 --- /dev/null +++ b/acceptance/bundle/templates/wrong-url/script @@ -0,0 +1,2 @@ +export NO_COLOR=1 +$CLI bundle init https://invalid-domain-123.databricks.com/hello/world diff --git a/acceptance/bundle/templates/wrong-url/test.toml b/acceptance/bundle/templates/wrong-url/test.toml new file mode 100644 index 000000000..0bb24bf1a --- /dev/null +++ b/acceptance/bundle/templates/wrong-url/test.toml @@ -0,0 +1,7 @@ +[[Repls]] +Old = '\\' +New = '/' + +[[Repls]] +Old = '/world-[0-9]+' +New = '/world-123456' From 4595c6f1b5d4890b6c9a1e13257319d52954dfe5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Jan 2025 14:11:07 +0100 Subject: [PATCH 13/25] Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 (#2238) Bumps [github.com/databricks/databricks-sdk-go](https://github.com/databricks/databricks-sdk-go) from 0.55.0 to 0.56.1.
Release notes

Sourced from github.com/databricks/databricks-sdk-go's releases.

v0.56.1

Bug Fixes

  • Do not send query parameters when set to zero value (#1136).

v0.56.0

Bug Fixes

  • Support Query parameters for all HTTP operations (#1124).

Internal Changes

  • Add download target to MakeFile (#1125).
  • Delete examples/mocking module (#1126).
  • Scope the traversing directory in the Recursive list workspace test (#1120).

API Changes:

... (truncated)


Commits
  • bf617bb [Release] Release v0.56.1 (#1137)
  • 18cebf1 [Fix] Do not send query parameters when set to zero value (#1136)
  • 28ff749 [Release] Release v0.56.0 (#1134)
  • 1134540 [Internal] Add download target to MakeFile (#1125)
  • e079db9 [Fix] Support Query parameters for all HTTP operations (#1124)
  • 1045fb9 [Internal] Delete examples/mocking module (#1126)
  • 914ab6b [Internal] Scope the traversing directory in the Recursive list workspace tes...

Most Recent Ignore Conditions Applied to This Pull Request

| Dependency Name | Ignore Conditions |
| --- | --- |
| github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] |
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.55.0&new-version=0.56.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester --- .codegen/_openapi_sha | 2 +- .codegen/service.go.tmpl | 20 +- .gitattributes | 1 + bundle/deploy/terraform/convert_test.go | 4 +- .../convert_model_serving_endpoint_test.go | 2 +- .../internal/schema/annotations_openapi.yml | 367 +++++++++++------- .../schema/annotations_openapi_overrides.yml | 11 + bundle/schema/jsonschema.json | 175 +++++---- .../custom-app-integration.go | 1 + cmd/api/api.go | 2 +- .../access-control/access-control.go | 109 ++++++ cmd/workspace/cmd.go | 2 + cmd/workspace/providers/providers.go | 4 +- cmd/workspace/recipients/recipients.go | 96 ++--- .../serving-endpoints/serving-endpoints.go | 111 +++++- go.mod | 2 +- go.sum | 4 +- integration/cmd/sync/sync_test.go | 2 +- libs/filer/files_client.go | 4 +- libs/filer/workspace_files_client.go | 5 +- .../workspace_files_extensions_client_test.go | 2 +- libs/git/info.go | 1 + 22 files changed, 588 insertions(+), 339 deletions(-) create mode 100755 cmd/workspace/access-control/access-control.go diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index dfe78790a..588cf9d63 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -779817ed8d63031f5ea761fbd25ee84f38feec0d \ No newline at end of file +0be1b914249781b5e903b7676fd02255755bc851 \ No newline at end of file diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index 0c9fa089a..2f4987b13 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func( {{- end }} ) +{{- $excludeFromJson := list "http-request"}} + func new{{.PascalName}}() *cobra.Command { cmd := &cobra.Command{} + {{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}} {{- if .Request}} var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}} {{- if .RequestBodyField }} {{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{} {{- end }} - {{- if .CanUseJson}} + {{- if $canUseJson}} var {{.CamelName}}Json flags.JsonFlag {{- end}} {{- end}} @@ -135,7 +138,7 @@ func new{{.PascalName}}() *cobra.Command { {{- $request = .RequestBodyField.Entity -}} {{- end -}} {{if $request }}// TODO: short flags - {{- if .CanUseJson}} + {{- if $canUseJson}} cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`) {{- end}} {{$method := .}} @@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command { {{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}} {{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}} {{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}} - {{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}} + {{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}} {{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}} {{- $atleastOneArgumentWithDescription := false -}} @@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command { ctx := cmd.Context() {{if .Service.IsAccounts}}a := 
root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}} {{- if .Request }} - {{ if .CanUseJson }} + {{ if $canUseJson }} if cmd.Flags().Changed("json") { diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }}) if diags.HasError() { @@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command { return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") }{{- end}} {{- if $hasPosArgs }} - {{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else { + {{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else { {{- end}} {{- if $hasIdPrompt}} if len(args) == 0 { @@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command { {{$method := .}} {{- range $arg, $field := .RequiredPositionalArguments}} - {{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}} + {{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}} {{- end -}} - {{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} + {{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} } {{- end}} @@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command { {{- $method := .Method -}} {{- $arg := .Arg -}} {{- $hasIdPrompt := .HasIdPrompt -}} - {{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }} + {{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }} + {{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }} {{- if $optionalIfJsonIsUsed }} if !cmd.Flags().Changed("json") { {{- end }} diff --git a/.gitattributes b/.gitattributes index 0a8ddf3cb..ebe94ed8e 100755 --- a/.gitattributes +++ b/.gitattributes @@ -31,6 +31,7 @@ cmd/account/users/users.go linguist-generated=true cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true cmd/account/workspaces/workspaces.go linguist-generated=true +cmd/workspace/access-control/access-control.go linguist-generated=true cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index ffe55db71..afc1fb22a 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -419,7 +419,7 @@ func TestBundleToTerraformModelServing(t *testing.T) { src := resources.ModelServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{ Name: "name", - Config: serving.EndpointCoreConfigInput{ + Config: &serving.EndpointCoreConfigInput{ ServedModels: []serving.ServedModelInput{ { ModelName: "model_name", @@ -474,7 +474,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) { // and as such observed the `omitempty` tag. // The new method leverages [dyn.Value] where any field that is not // explicitly set is not part of the value. 
- Config: serving.EndpointCoreConfigInput{ + Config: &serving.EndpointCoreConfigInput{ ServedModels: []serving.ServedModelInput{ { ModelName: "model_name", diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go index d46350bb7..98cf2dc22 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go @@ -17,7 +17,7 @@ func TestConvertModelServingEndpoint(t *testing.T) { src := resources.ModelServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{ Name: "name", - Config: serving.EndpointCoreConfigInput{ + Config: &serving.EndpointCoreConfigInput{ ServedModels: []serving.ServedModelInput{ { ModelName: "model_name", diff --git a/bundle/internal/schema/annotations_openapi.yml b/bundle/internal/schema/annotations_openapi.yml index 8ff5c9253..d5a9bf69e 100644 --- a/bundle/internal/schema/annotations_openapi.yml +++ b/bundle/internal/schema/annotations_openapi.yml @@ -353,12 +353,12 @@ github.com/databricks/cli/bundle/config/resources.MlflowModel: github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint: "ai_gateway": "description": |- - The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now. + The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported. "config": "description": |- The core config of the serving endpoint. "name": - "description": | + "description": |- The name of the serving endpoint. This field is required and must be unique across a Databricks workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. "rate_limits": @@ -1974,6 +1974,9 @@ github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask: Parameters passed to the main method. Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs. + "run_as_repl": + "description": |- + Deprecated. A value of `false` is no longer supported. github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask: "parameters": "description": |- @@ -2684,27 +2687,36 @@ github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScd github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig: "ai21labs_api_key": "description": |- - The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`. + The Databricks secret key reference for an AI21 Labs API key. If you + prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. + You must provide an API key using one of the following fields: + `ai21labs_api_key` or `ai21labs_api_key_plaintext`. "ai21labs_api_key_plaintext": "description": |- - An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`. + An AI21 Labs API key provided as a plaintext string. If you prefer to + reference your key using Databricks Secrets, see `ai21labs_api_key`. 
You + must provide an API key using one of the following fields: + `ai21labs_api_key` or `ai21labs_api_key_plaintext`. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig: "guardrails": "description": |- Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses. "inference_table_config": "description": |- - Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality. + Configuration for payload logging using inference tables. + Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality. "rate_limits": "description": |- Configuration for rate limits which can be set to limit endpoint traffic. "usage_tracking_config": "description": |- - Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs. + Configuration to enable usage tracking using system tables. + These tables allow you to monitor operational usage on endpoints and their associated costs. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters: "invalid_keywords": "description": |- - List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content. + List of invalid keywords. + AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content. "pii": "description": |- Configuration for guardrail PII filter. @@ -2713,15 +2725,14 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParame Indicates whether the safety filter is enabled. "valid_topics": "description": |- - The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics. + The list of allowed topics. + Given a chat request, this guardrail flags the request if its topic is not in the allowed topics. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior: "behavior": "description": |- - Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned. + Configuration for input guardrail filters. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior: "_": - "description": |- - Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned. "enum": - |- NONE @@ -2737,30 +2748,32 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails: github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig: "catalog_name": "description": |- - The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name. 
+ The name of the catalog in Unity Catalog. Required when enabling inference tables. + NOTE: On update, you have to disable inference table first in order to change the catalog name. "enabled": "description": |- Indicates whether the inference table is enabled. "schema_name": "description": |- - The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name. + The name of the schema in Unity Catalog. Required when enabling inference tables. + NOTE: On update, you have to disable inference table first in order to change the schema name. "table_name_prefix": "description": |- - The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name. + The prefix of the table in Unity Catalog. + NOTE: On update, you have to disable inference table first in order to change the prefix name. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit: "calls": "description": |- Used to specify how many calls are allowed for a key within the renewal_period. "key": "description": |- - Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. + Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, + with 'endpoint' being the default if not specified. "renewal_period": "description": |- Renewal period field for a rate limit. Currently, only 'minute' is supported. github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey: "_": - "description": |- - Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. "enum": - |- user @@ -2768,8 +2781,6 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey: endpoint github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod: "_": - "description": |- - Renewal period field for a rate limit. Currently, only 'minute' is supported. "enum": - |- minute @@ -2780,26 +2791,43 @@ github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingCo github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig: "aws_access_key_id": "description": |- - The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`. + The Databricks secret key reference for an AWS access key ID with + permissions to interact with Bedrock services. If you prefer to paste + your API key directly, see `aws_access_key_id_plaintext`. You must provide an API + key using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`. "aws_access_key_id_plaintext": "description": |- - An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`. + An AWS access key ID with permissions to interact with Bedrock services + provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `aws_access_key_id`. 
You must provide an API key + using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`. "aws_region": "description": |- The AWS region to use. Bedrock has to be enabled there. "aws_secret_access_key": "description": |- - The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`. + The Databricks secret key reference for an AWS secret access key paired + with the access key ID, with permissions to interact with Bedrock + services. If you prefer to paste your API key directly, see + `aws_secret_access_key_plaintext`. You must provide an API key using one + of the following fields: `aws_secret_access_key` or + `aws_secret_access_key_plaintext`. "aws_secret_access_key_plaintext": "description": |- - An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`. + An AWS secret access key paired with the access key ID, with permissions + to interact with Bedrock services provided as a plaintext string. If you + prefer to reference your key using Databricks Secrets, see + `aws_secret_access_key`. You must provide an API key using one of the + following fields: `aws_secret_access_key` or + `aws_secret_access_key_plaintext`. "bedrock_provider": "description": |- - The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. + The underlying provider in Amazon Bedrock. Supported values (case + insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider: "_": - "description": |- - The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon. "enum": - |- anthropic @@ -2812,10 +2840,16 @@ github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedro github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig: "anthropic_api_key": "description": |- - The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`. + The Databricks secret key reference for an Anthropic API key. If you + prefer to paste your API key directly, see `anthropic_api_key_plaintext`. + You must provide an API key using one of the following fields: + `anthropic_api_key` or `anthropic_api_key_plaintext`. "anthropic_api_key_plaintext": "description": |- - The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`. + The Anthropic API key provided as a plaintext string. If you prefer to + reference your key using Databricks Secrets, see `anthropic_api_key`. 
You + must provide an API key using one of the following fields: + `anthropic_api_key` or `anthropic_api_key_plaintext`. github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput: "catalog_name": "description": |- @@ -2831,42 +2865,58 @@ github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput: The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled. github.com/databricks/databricks-sdk-go/service/serving.CohereConfig: "cohere_api_base": - "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n" + "description": |- + This is an optional field to provide a customized base URL for the Cohere + API. If left unspecified, the standard Cohere base URL is used. "cohere_api_key": "description": |- - The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`. + The Databricks secret key reference for a Cohere API key. If you prefer + to paste your API key directly, see `cohere_api_key_plaintext`. You must + provide an API key using one of the following fields: `cohere_api_key` or + `cohere_api_key_plaintext`. "cohere_api_key_plaintext": "description": |- - The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`. + The Cohere API key provided as a plaintext string. If you prefer to + reference your key using Databricks Secrets, see `cohere_api_key`. You + must provide an API key using one of the following fields: + `cohere_api_key` or `cohere_api_key_plaintext`. github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig: "databricks_api_token": - "description": | - The Databricks secret key reference for a Databricks API token that corresponds to a user or service - principal with Can Query access to the model serving endpoint pointed to by this external model. - If you prefer to paste your API key directly, see `databricks_api_token_plaintext`. - You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`. + "description": |- + The Databricks secret key reference for a Databricks API token that + corresponds to a user or service principal with Can Query access to the + model serving endpoint pointed to by this external model. If you prefer + to paste your API key directly, see `databricks_api_token_plaintext`. You + must provide an API key using one of the following fields: + `databricks_api_token` or `databricks_api_token_plaintext`. "databricks_api_token_plaintext": - "description": | - The Databricks API token that corresponds to a user or service - principal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string. - If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. - You must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`. 
+ "description": |- + The Databricks API token that corresponds to a user or service principal + with Can Query access to the model serving endpoint pointed to by this + external model provided as a plaintext string. If you prefer to reference + your key using Databricks Secrets, see `databricks_api_token`. You must + provide an API key using one of the following fields: + `databricks_api_token` or `databricks_api_token_plaintext`. "databricks_workspace_url": - "description": | - The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model. + "description": |- + The URL of the Databricks workspace containing the model serving endpoint + pointed to by this external model. github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput: "auto_capture_config": "description": |- Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog. + Note: this field is deprecated for creating new provisioned throughput endpoints, + or updating existing provisioned throughput endpoints that never have inference table configured; + in these cases please use AI Gateway to manage inference tables. "served_entities": "description": |- - A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities. + The list of served entities under the serving endpoint config. "served_models": "description": |- - (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models. + (Deprecated, use served_entities instead) The list of served models under the serving endpoint config. "traffic_config": "description": |- - The traffic config defining how invocations to the serving endpoint should be routed. + The traffic configuration associated with the serving endpoint config. github.com/databricks/databricks-sdk-go/service/serving.EndpointTag: "key": "description": |- @@ -2903,17 +2953,13 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModel: "description": |- PaLM Config. Only required if the provider is 'palm'. "provider": - "description": | - The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', - 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", + "description": |- + The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'. "task": "description": |- The task type of the external model. github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider: "_": - "description": | - The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', - 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", "enum": - |- ai21labs @@ -2934,70 +2980,114 @@ github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider: github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig: "private_key": "description": |- - The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. 
See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext` + The Databricks secret key reference for a private key for the service + account which has access to the Google Cloud Vertex AI Service. See [Best + practices for managing service account keys]. If you prefer to paste your + API key directly, see `private_key_plaintext`. You must provide an API + key using one of the following fields: `private_key` or + `private_key_plaintext` + + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys "private_key_plaintext": "description": |- - The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`. + The private key for the service account which has access to the Google + Cloud Vertex AI Service provided as a plaintext secret. See [Best + practices for managing service account keys]. If you prefer to reference + your key using Databricks Secrets, see `private_key`. You must provide an + API key using one of the following fields: `private_key` or + `private_key_plaintext`. + + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys "project_id": "description": |- - This is the Google Cloud project id that the service account is associated with. + This is the Google Cloud project id that the service account is + associated with. "region": "description": |- - This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions. + This is the region for the Google Cloud Vertex AI Service. See [supported + regions] for more details. Some models are only available in specific + regions. + + [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig: + "_": + "description": |- + Configs needed to create an OpenAI model route. "microsoft_entra_client_id": - "description": | - This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID. + "description": |- + This field is only required for Azure AD OpenAI and is the Microsoft + Entra Client ID. "microsoft_entra_client_secret": - "description": | - The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication. - If you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`. - You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`. + "description": |- + The Databricks secret key reference for a client secret used for + Microsoft Entra ID authentication. 
If you prefer to paste your client + secret directly, see `microsoft_entra_client_secret_plaintext`. You must + provide an API key using one of the following fields: + `microsoft_entra_client_secret` or + `microsoft_entra_client_secret_plaintext`. "microsoft_entra_client_secret_plaintext": - "description": | - The client secret used for Microsoft Entra ID authentication provided as a plaintext string. - If you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`. - You must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`. + "description": |- + The client secret used for Microsoft Entra ID authentication provided as + a plaintext string. If you prefer to reference your key using Databricks + Secrets, see `microsoft_entra_client_secret`. You must provide an API key + using one of the following fields: `microsoft_entra_client_secret` or + `microsoft_entra_client_secret_plaintext`. "microsoft_entra_tenant_id": - "description": | - This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID. + "description": |- + This field is only required for Azure AD OpenAI and is the Microsoft + Entra Tenant ID. "openai_api_base": - "description": | - This is a field to provide a customized base URl for the OpenAI API. - For Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service - provided by Azure. - For other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used. + "description": |- + This is a field to provide a customized base URl for the OpenAI API. For + Azure OpenAI, this field is required, and is the base URL for the Azure + OpenAI API service provided by Azure. For other OpenAI API types, this + field is optional, and if left unspecified, the standard OpenAI base URL + is used. "openai_api_key": "description": |- - The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`. + The Databricks secret key reference for an OpenAI API key using the + OpenAI or Azure service. If you prefer to paste your API key directly, + see `openai_api_key_plaintext`. You must provide an API key using one of + the following fields: `openai_api_key` or `openai_api_key_plaintext`. "openai_api_key_plaintext": "description": |- - The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`. + The OpenAI API key using the OpenAI or Azure service provided as a + plaintext string. If you prefer to reference your key using Databricks + Secrets, see `openai_api_key`. You must provide an API key using one of + the following fields: `openai_api_key` or `openai_api_key_plaintext`. "openai_api_type": - "description": | - This is an optional field to specify the type of OpenAI API to use. - For Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security - access validation protocol. For access token validation, use azure. 
For authentication using Azure Active + "description": |- + This is an optional field to specify the type of OpenAI API to use. For + Azure OpenAI, this field is required, and adjust this parameter to + represent the preferred security access validation protocol. For access + token validation, use azure. For authentication using Azure Active Directory (Azure AD) use, azuread. "openai_api_version": - "description": | - This is an optional field to specify the OpenAI API version. - For Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to - utilize, specified by a date. + "description": |- + This is an optional field to specify the OpenAI API version. For Azure + OpenAI, this field is required, and is the version of the Azure OpenAI + service to utilize, specified by a date. "openai_deployment_name": - "description": | - This field is only required for Azure OpenAI and is the name of the deployment resource for the - Azure OpenAI service. + "description": |- + This field is only required for Azure OpenAI and is the name of the + deployment resource for the Azure OpenAI service. "openai_organization": - "description": | - This is an optional field to specify the organization in OpenAI or Azure OpenAI. + "description": |- + This is an optional field to specify the organization in OpenAI or Azure + OpenAI. github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig: "palm_api_key": "description": |- - The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`. + The Databricks secret key reference for a PaLM API key. If you prefer to + paste your API key directly, see `palm_api_key_plaintext`. You must + provide an API key using one of the following fields: `palm_api_key` or + `palm_api_key_plaintext`. "palm_api_key_plaintext": "description": |- - The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`. + The PaLM API key provided as a plaintext string. If you prefer to + reference your key using Databricks Secrets, see `palm_api_key`. You must + provide an API key using one of the following fields: `palm_api_key` or + `palm_api_key_plaintext`. github.com/databricks/databricks-sdk-go/service/serving.RateLimit: "calls": "description": |- @@ -3010,8 +3100,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimit: Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported. github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey: "_": - "description": |- - Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified. "enum": - |- user @@ -3019,8 +3107,6 @@ github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey: endpoint github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod: "_": - "description": |- - Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported. "enum": - |- minute @@ -3033,21 +3119,15 @@ github.com/databricks/databricks-sdk-go/service/serving.Route: The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive. 
github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: "entity_name": - "description": | - The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), - or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of - __catalog_name__.__schema_name__.__model_name__. - "entity_version": "description": |- - The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC. + The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**. + "entity_version": {} "environment_vars": - "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`" + "description": |- + An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}` "external_model": - "description": | - The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) - can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, - it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. - The task type of all external models within an endpoint must be the same. + "description": |- + The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same. "instance_profile_arn": "description": |- ARN of the instance profile that the served entity uses to access AWS resources. @@ -3058,68 +3138,46 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: "description": |- The minimum tokens per second that the endpoint can scale down to. "name": - "description": | - The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. - If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other - entities, it defaults to -. 
+ "description": |- + The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version. "scale_to_zero_enabled": "description": |- Whether the compute resources for the served entity should scale down to zero. "workload_size": - "description": | - The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. - A single unit of provisioned concurrency can process one request at a time. - Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). - If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. + "description": |- + The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. "workload_type": - "description": | - The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is - "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. - See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types). + "description": |- + The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types). github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: "environment_vars": - "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`" + "description": |- + An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}` "instance_profile_arn": "description": |- - ARN of the instance profile that the served model will use to access AWS resources. + ARN of the instance profile that the served entity uses to access AWS resources. 
"max_provisioned_throughput": "description": |- The maximum tokens per second that the endpoint can scale up to. "min_provisioned_throughput": "description": |- The minimum tokens per second that the endpoint can scale down to. - "model_name": - "description": | - The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model, - in the form of __catalog_name__.__schema_name__.__model_name__. - "model_version": - "description": |- - The version of the model in Databricks Model Registry or Unity Catalog to be served. + "model_name": {} + "model_version": {} "name": - "description": | - The name of a served model. It must be unique across an endpoint. If not specified, this field will default to -. - A served model name can consist of alphanumeric characters, dashes, and underscores. + "description": |- + The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version. "scale_to_zero_enabled": "description": |- - Whether the compute resources for the served model should scale down to zero. + Whether the compute resources for the served entity should scale down to zero. "workload_size": - "description": | - The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. - A single unit of provisioned concurrency can process one request at a time. - Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). - If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0. + "description": |- + The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0. "workload_type": - "description": | - The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is - "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. - See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types). + "description": |- + The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types). github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize: "_": - "description": | - The workload size of the served model. 
The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. - A single unit of provisioned concurrency can process one request at a time. - Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). - If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0. "enum": - |- Small @@ -3129,17 +3187,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkload Large github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType: "_": - "description": | - The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is - "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. - See the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types). "enum": - |- CPU + - |- + GPU_MEDIUM - |- GPU_SMALL + - |- + GPU_LARGE + - |- + MULTIGPU_MEDIUM +github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType: + "_": + "enum": + - |- + CPU - |- GPU_MEDIUM + - |- + GPU_SMALL - |- GPU_LARGE - |- diff --git a/bundle/internal/schema/annotations_openapi_overrides.yml b/bundle/internal/schema/annotations_openapi_overrides.yml index 120a12543..323432fa3 100644 --- a/bundle/internal/schema/annotations_openapi_overrides.yml +++ b/bundle/internal/schema/annotations_openapi_overrides.yml @@ -197,3 +197,14 @@ github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger: "manual": "description": |- PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput: + "entity_version": + "description": |- + PLACEHOLDER +github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput: + "model_name": + "description": |- + PLACEHOLDER + "model_version": + "description": |- + PLACEHOLDER diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 4a3b56814..17a621ba0 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -546,7 +546,7 @@ "type": "object", "properties": { "ai_gateway": { - "description": "The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.", + "description": "The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned throughput endpoints are currently supported.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig" }, "config": { @@ -554,7 +554,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput" }, "name": { - "description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.\n", + "description": "The name of the serving endpoint. 
This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.", "$ref": "#/$defs/string" }, "permissions": { @@ -575,7 +575,6 @@ }, "additionalProperties": false, "required": [ - "config", "name" ] }, @@ -4142,6 +4141,10 @@ "parameters": { "description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.", "$ref": "#/$defs/slice/string" + }, + "run_as_repl": { + "description": "Deprecated. A value of `false` is no longer supported.", + "$ref": "#/$defs/bool" } }, "additionalProperties": false @@ -5502,11 +5505,11 @@ "type": "object", "properties": { "ai21labs_api_key": { - "description": "The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.", + "description": "The Databricks secret key reference for an AI21 Labs API key. If you\nprefer to paste your API key directly, see `ai21labs_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string" }, "ai21labs_api_key_plaintext": { - "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.", + "description": "An AI21 Labs API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `ai21labs_api_key`. You\nmust provide an API key using one of the following fields:\n`ai21labs_api_key` or `ai21labs_api_key_plaintext`.", "$ref": "#/$defs/string" } }, @@ -5528,7 +5531,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails" }, "inference_table_config": { - "description": "Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.", + "description": "Configuration for payload logging using inference tables.\nUse these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig" }, "rate_limits": { @@ -5536,7 +5539,7 @@ "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit" }, "usage_tracking_config": { - "description": "Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.", + "description": "Configuration to enable usage tracking using system tables.\nThese tables allow you to monitor operational usage on endpoints and their associated costs.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig" } }, @@ -5554,7 +5557,7 @@ "type": "object", "properties": { "invalid_keywords": { - "description": "List of invalid keywords. 
AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", + "description": "List of invalid keywords.\nAI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.", "$ref": "#/$defs/slice/string" }, "pii": { @@ -5566,7 +5569,7 @@ "$ref": "#/$defs/bool" }, "valid_topics": { - "description": "The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.", + "description": "The list of allowed topics.\nGiven a chat request, this guardrail flags the request if its topic is not in the allowed topics.", "$ref": "#/$defs/slice/string" } }, @@ -5584,14 +5587,11 @@ "type": "object", "properties": { "behavior": { - "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.", + "description": "Configuration for input guardrail filters.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior" } }, - "additionalProperties": false, - "required": [ - "behavior" - ] + "additionalProperties": false }, { "type": "string", @@ -5603,7 +5603,6 @@ "oneOf": [ { "type": "string", - "description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.", "enum": [ "NONE", "BLOCK" @@ -5643,7 +5642,7 @@ "type": "object", "properties": { "catalog_name": { - "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.", + "description": "The name of the catalog in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the catalog name.", "$ref": "#/$defs/string" }, "enabled": { @@ -5651,11 +5650,11 @@ "$ref": "#/$defs/bool" }, "schema_name": { - "description": "The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.", + "description": "The name of the schema in Unity Catalog. Required when enabling inference tables.\nNOTE: On update, you have to disable inference table first in order to change the schema name.", "$ref": "#/$defs/string" }, "table_name_prefix": { - "description": "The prefix of the table in Unity Catalog. 
NOTE: On update, you have to disable inference table first in order to change the prefix name.", + "description": "The prefix of the table in Unity Catalog.\nNOTE: On update, you have to disable inference table first in order to change the prefix name.", "$ref": "#/$defs/string" } }, @@ -5674,10 +5673,10 @@ "properties": { "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int64" }, "key": { - "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", + "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported,\nwith 'endpoint' being the default if not specified.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey" }, "renewal_period": { @@ -5701,7 +5700,6 @@ "oneOf": [ { "type": "string", - "description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", "enum": [ "user", "endpoint" @@ -5717,7 +5715,6 @@ "oneOf": [ { "type": "string", - "description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.", "enum": [ "minute" ] @@ -5752,11 +5749,11 @@ "type": "object", "properties": { "aws_access_key_id": { - "description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.", + "description": "The Databricks secret key reference for an AWS access key ID with\npermissions to interact with Bedrock services. If you prefer to paste\nyour API key directly, see `aws_access_key_id_plaintext`. You must provide an API\nkey using one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string" }, "aws_access_key_id_plaintext": { - "description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.", + "description": "An AWS access key ID with permissions to interact with Bedrock services\nprovided as a plaintext string. If you prefer to reference your key using\nDatabricks Secrets, see `aws_access_key_id`. You must provide an API key\nusing one of the following fields: `aws_access_key_id` or\n`aws_access_key_id_plaintext`.", "$ref": "#/$defs/string" }, "aws_region": { @@ -5764,15 +5761,15 @@ "$ref": "#/$defs/string" }, "aws_secret_access_key": { - "description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.", + "description": "The Databricks secret key reference for an AWS secret access key paired\nwith the access key ID, with permissions to interact with Bedrock\nservices. If you prefer to paste your API key directly, see\n`aws_secret_access_key_plaintext`. 
You must provide an API key using one\nof the following fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string" }, "aws_secret_access_key_plaintext": { - "description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.", + "description": "An AWS secret access key paired with the access key ID, with permissions\nto interact with Bedrock services provided as a plaintext string. If you\nprefer to reference your key using Databricks Secrets, see\n`aws_secret_access_key`. You must provide an API key using one of the\nfollowing fields: `aws_secret_access_key` or\n`aws_secret_access_key_plaintext`.", "$ref": "#/$defs/string" }, "bedrock_provider": { - "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", + "description": "The underlying provider in Amazon Bedrock. Supported values (case\ninsensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider" } }, @@ -5792,7 +5789,6 @@ "oneOf": [ { "type": "string", - "description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.", "enum": [ "anthropic", "cohere", @@ -5812,11 +5808,11 @@ "type": "object", "properties": { "anthropic_api_key": { - "description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.", + "description": "The Databricks secret key reference for an Anthropic API key. If you\nprefer to paste your API key directly, see `anthropic_api_key_plaintext`.\nYou must provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string" }, "anthropic_api_key_plaintext": { - "description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.", + "description": "The Anthropic API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `anthropic_api_key`. You\nmust provide an API key using one of the following fields:\n`anthropic_api_key` or `anthropic_api_key_plaintext`.", "$ref": "#/$defs/string" } }, @@ -5864,15 +5860,15 @@ "type": "object", "properties": { "cohere_api_base": { - "description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n", + "description": "This is an optional field to provide a customized base URL for the Cohere\nAPI. If left unspecified, the standard Cohere base URL is used.", "$ref": "#/$defs/string" }, "cohere_api_key": { - "description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. 
You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.", + "description": "The Databricks secret key reference for a Cohere API key. If you prefer\nto paste your API key directly, see `cohere_api_key_plaintext`. You must\nprovide an API key using one of the following fields: `cohere_api_key` or\n`cohere_api_key_plaintext`.", "$ref": "#/$defs/string" }, "cohere_api_key_plaintext": { - "description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.", + "description": "The Cohere API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `cohere_api_key`. You\nmust provide an API key using one of the following fields:\n`cohere_api_key` or `cohere_api_key_plaintext`.", "$ref": "#/$defs/string" } }, @@ -5890,15 +5886,15 @@ "type": "object", "properties": { "databricks_api_token": { - "description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n", + "description": "The Databricks secret key reference for a Databricks API token that\ncorresponds to a user or service principal with Can Query access to the\nmodel serving endpoint pointed to by this external model. If you prefer\nto paste your API key directly, see `databricks_api_token_plaintext`. You\nmust provide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string" }, "databricks_api_token_plaintext": { - "description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n", + "description": "The Databricks API token that corresponds to a user or service principal\nwith Can Query access to the model serving endpoint pointed to by this\nexternal model provided as a plaintext string. If you prefer to reference\nyour key using Databricks Secrets, see `databricks_api_token`. 
You must\nprovide an API key using one of the following fields:\n`databricks_api_token` or `databricks_api_token_plaintext`.", "$ref": "#/$defs/string" }, "databricks_workspace_url": { - "description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n", + "description": "The URL of the Databricks workspace containing the model serving endpoint\npointed to by this external model.", "$ref": "#/$defs/string" } }, @@ -5919,19 +5915,19 @@ "type": "object", "properties": { "auto_capture_config": { - "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.", + "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.\nNote: this field is deprecated for creating new provisioned throughput endpoints,\nor updating existing provisioned throughput endpoints that never have inference table configured;\nin these cases please use AI Gateway to manage inference tables.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput" }, "served_entities": { - "description": "A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.", + "description": "The list of served entities under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput" }, "served_models": { - "description": "(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.", + "description": "(Deprecated, use served_entities instead) The list of served models under the serving endpoint config.", "$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput" }, "traffic_config": { - "description": "The traffic config defining how invocations to the serving endpoint should be routed.", + "description": "The traffic configuration associated with the serving endpoint config.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig" } }, @@ -6010,7 +6006,7 @@ "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig" }, "provider": { - "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n", + "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider" }, "task": { @@ -6035,7 +6031,6 @@ "oneOf": [ { "type": "string", - "description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n", "enum": [ "ai21labs", "anthropic", @@ -6059,23 +6054,27 @@ "type": "object", "properties": { "private_key": { - "description": "The Databricks secret key reference for a private key for the service account which has access to the Google Cloud Vertex AI Service. 
See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`", + "description": "The Databricks secret key reference for a private key for the service\naccount which has access to the Google Cloud Vertex AI Service. See [Best\npractices for managing service account keys]. If you prefer to paste your\nAPI key directly, see `private_key_plaintext`. You must provide an API\nkey using one of the following fields: `private_key` or\n`private_key_plaintext`\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string" }, "private_key_plaintext": { - "description": "The private key for the service account which has access to the Google Cloud Vertex AI Service provided as a plaintext secret. See [Best practices for managing service account keys](https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys). If you prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an API key using one of the following fields: `private_key` or `private_key_plaintext`.", + "description": "The private key for the service account which has access to the Google\nCloud Vertex AI Service provided as a plaintext secret. See [Best\npractices for managing service account keys]. If you prefer to reference\nyour key using Databricks Secrets, see `private_key`. You must provide an\nAPI key using one of the following fields: `private_key` or\n`private_key_plaintext`.\n\n[Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys", "$ref": "#/$defs/string" }, "project_id": { - "description": "This is the Google Cloud project id that the service account is associated with.", + "description": "This is the Google Cloud project id that the service account is\nassociated with.", "$ref": "#/$defs/string" }, "region": { - "description": "This is the region for the Google Cloud Vertex AI Service. See [supported regions](https://cloud.google.com/vertex-ai/docs/general/locations) for more details. Some models are only available in specific regions.", + "description": "This is the region for the Google Cloud Vertex AI Service. See [supported\nregions] for more details. 
Some models are only available in specific\nregions.\n\n[supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations", "$ref": "#/$defs/string" } }, - "additionalProperties": false + "additionalProperties": false, + "required": [ + "project_id", + "region" + ] }, { "type": "string", @@ -6087,49 +6086,50 @@ "oneOf": [ { "type": "object", + "description": "Configs needed to create an OpenAI model route.", "properties": { "microsoft_entra_client_id": { - "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.\n", + "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Client ID.", "$ref": "#/$defs/string" }, "microsoft_entra_client_secret": { - "description": "The Databricks secret key reference for a client secret used for Microsoft Entra ID authentication.\nIf you prefer to paste your client secret directly, see `microsoft_entra_client_secret_plaintext`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n", + "description": "The Databricks secret key reference for a client secret used for\nMicrosoft Entra ID authentication. If you prefer to paste your client\nsecret directly, see `microsoft_entra_client_secret_plaintext`. You must\nprovide an API key using one of the following fields:\n`microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string" }, "microsoft_entra_client_secret_plaintext": { - "description": "The client secret used for Microsoft Entra ID authentication provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`.\nYou must provide an API key using one of the following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.\n", + "description": "The client secret used for Microsoft Entra ID authentication provided as\na plaintext string. If you prefer to reference your key using Databricks\nSecrets, see `microsoft_entra_client_secret`. You must provide an API key\nusing one of the following fields: `microsoft_entra_client_secret` or\n`microsoft_entra_client_secret_plaintext`.", "$ref": "#/$defs/string" }, "microsoft_entra_tenant_id": { - "description": "This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.\n", + "description": "This field is only required for Azure AD OpenAI and is the Microsoft\nEntra Tenant ID.", "$ref": "#/$defs/string" }, "openai_api_base": { - "description": "This is a field to provide a customized base URl for the OpenAI API.\nFor Azure OpenAI, this field is required, and is the base URL for the Azure OpenAI API service\nprovided by Azure.\nFor other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI base URL is used.\n", + "description": "This is a field to provide a customized base URl for the OpenAI API. For\nAzure OpenAI, this field is required, and is the base URL for the Azure\nOpenAI API service provided by Azure. For other OpenAI API types, this\nfield is optional, and if left unspecified, the standard OpenAI base URL\nis used.", "$ref": "#/$defs/string" }, "openai_api_key": { - "description": "The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If you prefer to paste your API key directly, see `openai_api_key_plaintext`. 
You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.", + "description": "The Databricks secret key reference for an OpenAI API key using the\nOpenAI or Azure service. If you prefer to paste your API key directly,\nsee `openai_api_key_plaintext`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string" }, "openai_api_key_plaintext": { - "description": "The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `openai_api_key`. You must provide an API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.", + "description": "The OpenAI API key using the OpenAI or Azure service provided as a\nplaintext string. If you prefer to reference your key using Databricks\nSecrets, see `openai_api_key`. You must provide an API key using one of\nthe following fields: `openai_api_key` or `openai_api_key_plaintext`.", "$ref": "#/$defs/string" }, "openai_api_type": { - "description": "This is an optional field to specify the type of OpenAI API to use.\nFor Azure OpenAI, this field is required, and adjust this parameter to represent the preferred security\naccess validation protocol. For access token validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.\n", + "description": "This is an optional field to specify the type of OpenAI API to use. For\nAzure OpenAI, this field is required, and adjust this parameter to\nrepresent the preferred security access validation protocol. For access\ntoken validation, use azure. For authentication using Azure Active\nDirectory (Azure AD) use, azuread.", "$ref": "#/$defs/string" }, "openai_api_version": { - "description": "This is an optional field to specify the OpenAI API version.\nFor Azure OpenAI, this field is required, and is the version of the Azure OpenAI service to\nutilize, specified by a date.\n", + "description": "This is an optional field to specify the OpenAI API version. For Azure\nOpenAI, this field is required, and is the version of the Azure OpenAI\nservice to utilize, specified by a date.", "$ref": "#/$defs/string" }, "openai_deployment_name": { - "description": "This field is only required for Azure OpenAI and is the name of the deployment resource for the\nAzure OpenAI service.\n", + "description": "This field is only required for Azure OpenAI and is the name of the\ndeployment resource for the Azure OpenAI service.", "$ref": "#/$defs/string" }, "openai_organization": { - "description": "This is an optional field to specify the organization in OpenAI or Azure OpenAI.\n", + "description": "This is an optional field to specify the organization in OpenAI or Azure\nOpenAI.", "$ref": "#/$defs/string" } }, @@ -6147,11 +6147,11 @@ "type": "object", "properties": { "palm_api_key": { - "description": "The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.", + "description": "The Databricks secret key reference for a PaLM API key. If you prefer to\npaste your API key directly, see `palm_api_key_plaintext`. 
You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string" }, "palm_api_key_plaintext": { - "description": "The PaLM API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `palm_api_key`. You must provide an API key using one of the following fields: `palm_api_key` or `palm_api_key_plaintext`.", + "description": "The PaLM API key provided as a plaintext string. If you prefer to\nreference your key using Databricks Secrets, see `palm_api_key`. You must\nprovide an API key using one of the following fields: `palm_api_key` or\n`palm_api_key_plaintext`.", "$ref": "#/$defs/string" } }, @@ -6170,7 +6170,7 @@ "properties": { "calls": { "description": "Used to specify how many calls are allowed for a key within the renewal_period.", - "$ref": "#/$defs/int" + "$ref": "#/$defs/int64" }, "key": { "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", @@ -6197,7 +6197,6 @@ "oneOf": [ { "type": "string", - "description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.", "enum": [ "user", "endpoint" @@ -6213,7 +6212,6 @@ "oneOf": [ { "type": "string", - "description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.", "enum": [ "minute" ] @@ -6256,19 +6254,18 @@ "type": "object", "properties": { "entity_name": { - "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),\nor a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of\n__catalog_name__.__schema_name__.__model_name__.\n", + "description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of **catalog_name.schema_name.model_name**.", "$ref": "#/$defs/string" }, "entity_version": { - "description": "The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.", "$ref": "#/$defs/string" }, "environment_vars": { - "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", + "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string" }, "external_model": { - "description": "The external model to be served. 
NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n", + "description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later. The task type of all external models within an endpoint must be the same.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel" }, "instance_profile_arn": { @@ -6284,7 +6281,7 @@ "$ref": "#/$defs/int" }, "name": { - "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.\nIf not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other\nentities, it defaults to \u003centity-name\u003e-\u003centity-version\u003e.\n", + "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string" }, "scale_to_zero_enabled": { @@ -6292,12 +6289,12 @@ "$ref": "#/$defs/bool" }, "workload_size": { - "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.\n", + "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.", "$ref": "#/$defs/string" }, "workload_type": { - "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". 
For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n", - "$ref": "#/$defs/string" + "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServingModelWorkloadType" } }, "additionalProperties": false @@ -6314,11 +6311,11 @@ "type": "object", "properties": { "environment_vars": { - "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", + "description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`", "$ref": "#/$defs/map/string" }, "instance_profile_arn": { - "description": "ARN of the instance profile that the served model will use to access AWS resources.", + "description": "ARN of the instance profile that the served entity uses to access AWS resources.", "$ref": "#/$defs/string" }, "max_provisioned_throughput": { @@ -6330,27 +6327,25 @@ "$ref": "#/$defs/int" }, "model_name": { - "description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n", "$ref": "#/$defs/string" }, "model_version": { - "description": "The version of the model in Databricks Model Registry or Unity Catalog to be served.", "$ref": "#/$defs/string" }, "name": { - "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to \u003cmodel-name\u003e-\u003cmodel-version\u003e.\nA served model name can consist of alphanumeric characters, dashes, and underscores.\n", + "description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores. If not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other entities, it defaults to entity_name-entity_version.", "$ref": "#/$defs/string" }, "scale_to_zero_enabled": { - "description": "Whether the compute resources for the served model should scale down to zero.", + "description": "Whether the compute resources for the served entity should scale down to zero.", "$ref": "#/$defs/bool" }, "workload_size": { - "description": "The workload size of the served model. 
The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n", + "description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize" }, "workload_type": { - "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n", + "description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is \"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types](https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType" } }, @@ -6371,7 +6366,6 @@ "oneOf": [ { "type": "string", - "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n", "enum": [ "Small", "Medium", @@ -6388,11 +6382,28 @@ "oneOf": [ { "type": "string", - "description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". 
For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n", "enum": [ "CPU", - "GPU_SMALL", "GPU_MEDIUM", + "GPU_SMALL", + "GPU_LARGE", + "MULTIGPU_MEDIUM" + ] + }, + { + "type": "string", + "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}" + } + ] + }, + "serving.ServingModelWorkloadType": { + "oneOf": [ + { + "type": "string", + "enum": [ + "CPU", + "GPU_MEDIUM", + "GPU_SMALL", "GPU_LARGE", "MULTIGPU_MEDIUM" ] diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go index 1eec1018e..43e458bc6 100755 --- a/cmd/account/custom-app-integration/custom-app-integration.go +++ b/cmd/account/custom-app-integration/custom-app-integration.go @@ -307,6 +307,7 @@ func newUpdate() *cobra.Command { cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: redirect_urls + // TODO: array: scopes // TODO: complex arg: token_access_policy cmd.Use = "update INTEGRATION_ID" diff --git a/cmd/api/api.go b/cmd/api/api.go index c3a3eb0b6..fad8a026f 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -62,7 +62,7 @@ func makeCommand(method string) *cobra.Command { var response any headers := map[string]string{"Content-Type": "application/json"} - err = api.Do(cmd.Context(), method, path, headers, request, &response) + err = api.Do(cmd.Context(), method, path, headers, nil, request, &response) if err != nil { return err } diff --git a/cmd/workspace/access-control/access-control.go b/cmd/workspace/access-control/access-control.go new file mode 100755 index 000000000..7668265fb --- /dev/null +++ b/cmd/workspace/access-control/access-control.go @@ -0,0 +1,109 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package access_control + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "access-control", + Short: `Rule based Access Control for Databricks Resources.`, + Long: `Rule based Access Control for Databricks Resources.`, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Add methods + cmd.AddCommand(newCheckPolicy()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start check-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var checkPolicyOverrides []func( + *cobra.Command, + *iam.CheckPolicyRequest, +) + +func newCheckPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var checkPolicyReq iam.CheckPolicyRequest + var checkPolicyJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&checkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: complex arg: resource_info + + cmd.Use = "check-policy" + cmd.Short = `Check access policy to a resource.` + cmd.Long = `Check access policy to a resource.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := checkPolicyJson.Unmarshal(&checkPolicyReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.AccessControl.CheckPolicy(ctx, checkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range checkPolicyOverrides { + fn(cmd, &checkPolicyReq) + } + + return cmd +} + +// end service AccessControl diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index f07d0cf76..c447bd736 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -3,6 +3,7 @@ package workspace import ( + access_control "github.com/databricks/cli/cmd/workspace/access-control" alerts "github.com/databricks/cli/cmd/workspace/alerts" alerts_legacy "github.com/databricks/cli/cmd/workspace/alerts-legacy" apps "github.com/databricks/cli/cmd/workspace/apps" @@ -96,6 +97,7 @@ import ( func All() []*cobra.Command { var out []*cobra.Command + out = append(out, access_control.New()) out = append(out, alerts.New()) out = append(out, alerts_legacy.New()) out = append(out, apps.New()) diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index 504beac5e..4d6262cff 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -64,7 +64,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`) - cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`) cmd.Use = "create NAME AUTHENTICATION_TYPE" cmd.Short = `Create an auth provider.` @@ -430,7 +430,7 @@ func newUpdate() *cobra.Command { cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`) cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the provider.`) 
cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`) - cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS** or not provided.`) cmd.Use = "update NAME" cmd.Short = `Update a provider.` diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index 56abd2014..6d6ce42f1 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -91,7 +91,7 @@ func newCreate() *cobra.Command { cmd.Long = `Create a share recipient. Creates a new recipient with the delta sharing authentication type in the - metastore. The caller must be a metastore admin or has the + metastore. The caller must be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore. Arguments: @@ -186,28 +186,16 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the recipient") - } deleteReq.Name = args[0] err = w.Recipients.Delete(ctx, deleteReq) @@ -258,28 +246,16 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the recipient") - } getReq.Name = args[0] response, err := w.Recipients.Get(ctx, getReq) @@ -384,7 +360,7 @@ func newRotateToken() *cobra.Command { the provided token info. The caller must be the owner of the recipient. Arguments: - NAME: The name of the recipient. + NAME: The name of the Recipient. 
EXISTING_TOKEN_EXPIRE_IN_SECONDS: The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire the existing token @@ -479,28 +455,16 @@ func newSharePermissions() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The name of the Recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the name of the recipient") - } sharePermissionsReq.Name = args[0] response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq) @@ -560,6 +524,11 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -577,30 +546,13 @@ func newUpdate() *cobra.Command { } } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the recipient") - } updateReq.Name = args[0] - err = w.Recipients.Update(ctx, updateReq) + response, err := w.Recipients.Update(ctx, updateReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. 
diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index cc99177c7..034133623 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -49,6 +49,7 @@ func New() *cobra.Command { cmd.AddCommand(newGetOpenApi()) cmd.AddCommand(newGetPermissionLevels()) cmd.AddCommand(newGetPermissions()) + cmd.AddCommand(newHttpRequest()) cmd.AddCommand(newList()) cmd.AddCommand(newLogs()) cmd.AddCommand(newPatch()) @@ -153,16 +154,34 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: ai_gateway + // TODO: complex arg: config // TODO: array: rate_limits cmd.Flags().BoolVar(&createReq.RouteOptimized, "route-optimized", createReq.RouteOptimized, `Enable route optimization for the serving endpoint.`) // TODO: array: tags - cmd.Use = "create" + cmd.Use = "create NAME" cmd.Short = `Create a new serving endpoint.` - cmd.Long = `Create a new serving endpoint.` + cmd.Long = `Create a new serving endpoint. + + Arguments: + NAME: The name of the serving endpoint. This field is required and must be + unique across a Databricks workspace. An endpoint name can consist of + alphanumeric characters, dashes, and underscores.` cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(0)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide 'name' in your JSON input") + } + return nil + } + check := root.ExactArgs(1) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -179,8 +198,9 @@ func newCreate() *cobra.Command { return err } } - } else { - return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + if !cmd.Flags().Changed("json") { + createReq.Name = args[0] } wait, err := w.ServingEndpoints.Create(ctx, createReq) @@ -233,10 +253,7 @@ func newDelete() *cobra.Command { cmd.Use = "delete NAME" cmd.Short = `Delete a serving endpoint.` - cmd.Long = `Delete a serving endpoint. - - Arguments: - NAME: The name of the serving endpoint. This field is required.` + cmd.Long = `Delete a serving endpoint.` cmd.Annotations = make(map[string]string) @@ -432,11 +449,12 @@ func newGetOpenApi() *cobra.Command { getOpenApiReq.Name = args[0] - err = w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq) + response, err := w.ServingEndpoints.GetOpenApi(ctx, getOpenApiReq) if err != nil { return err } - return nil + defer response.Contents.Close() + return cmdio.Render(ctx, response.Contents) } // Disable completions since they are not applicable. @@ -568,6 +586,77 @@ func newGetPermissions() *cobra.Command { return cmd } +// start http-request command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var httpRequestOverrides []func( + *cobra.Command, + *serving.ExternalFunctionRequest, +) + +func newHttpRequest() *cobra.Command { + cmd := &cobra.Command{} + + var httpRequestReq serving.ExternalFunctionRequest + + // TODO: short flags + + cmd.Flags().StringVar(&httpRequestReq.Headers, "headers", httpRequestReq.Headers, `Additional headers for the request.`) + cmd.Flags().StringVar(&httpRequestReq.Json, "json", httpRequestReq.Json, `The JSON payload to send in the request body.`) + cmd.Flags().StringVar(&httpRequestReq.Params, "params", httpRequestReq.Params, `Query parameters for the request.`) + + cmd.Use = "http-request CONNECTION_NAME METHOD PATH" + cmd.Short = `Make external services call using the credentials stored in UC Connection.` + cmd.Long = `Make external services call using the credentials stored in UC Connection. + + Arguments: + CONNECTION_NAME: The connection name to use. This is required to identify the external + connection. + METHOD: The HTTP method to use (e.g., 'GET', 'POST'). + PATH: The relative path for the API endpoint. This is required.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + httpRequestReq.ConnectionName = args[0] + _, err = fmt.Sscan(args[1], &httpRequestReq.Method) + if err != nil { + return fmt.Errorf("invalid METHOD: %s", args[1]) + } + httpRequestReq.Path = args[2] + + response, err := w.ServingEndpoints.HttpRequest(ctx, httpRequestReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range httpRequestOverrides { + fn(cmd, &httpRequestReq) + } + + return cmd +} + // start list command // Slice with functions to override default command behavior. @@ -849,7 +938,7 @@ func newPutAiGateway() *cobra.Command { cmd.Long = `Update AI Gateway of a serving endpoint. Used to update the AI Gateway of a serving endpoint. NOTE: Only external model - endpoints are currently supported. + and provisioned throughput endpoints are currently supported. Arguments: NAME: The name of the serving endpoint whose AI Gateway is being updated. 
This diff --git a/go.mod b/go.mod index 930963f89..bd8997190 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/BurntSushi/toml v1.4.0 // MIT github.com/Masterminds/semver/v3 v3.3.1 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.55.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.56.1 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/hashicorp/go-version v1.7.0 // MPL 2.0 diff --git a/go.sum b/go.sum index d025b3947..dec1d40b2 100644 --- a/go.sum +++ b/go.sum @@ -34,8 +34,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/databricks/databricks-sdk-go v0.55.0 h1:ReziD6spzTDltM0ml80LggKo27F3oUjgTinCFDJDnak= -github.com/databricks/databricks-sdk-go v0.55.0/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI= +github.com/databricks/databricks-sdk-go v0.56.1 h1:sgweTRvAQaI8EPrfDnVdAB0lNX6L5uTT720SlMMQI2U= +github.com/databricks/databricks-sdk-go v0.56.1/go.mod h1:JpLizplEs+up9/Z4Xf2x++o3sM9eTTWFGzIXAptKJzI= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/integration/cmd/sync/sync_test.go b/integration/cmd/sync/sync_test.go index 632497054..88e6ed89a 100644 --- a/integration/cmd/sync/sync_test.go +++ b/integration/cmd/sync/sync_test.go @@ -158,7 +158,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath, expected var res []byte a.c.Eventually(func() bool { - err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res) + err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &res) require.NoError(a.t, err) actualContent := string(res) return actualContent == expectedContent diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go index 88bbadd32..7102b6e29 100644 --- a/libs/filer/files_client.go +++ b/libs/filer/files_client.go @@ -148,7 +148,7 @@ func (w *FilesClient) Write(ctx context.Context, name string, reader io.Reader, overwrite := slices.Contains(mode, OverwriteIfExists) urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite) headers := map[string]string{"Content-Type": "application/octet-stream"} - err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil) + err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, nil, reader, nil) // Return early on success. if err == nil { @@ -176,7 +176,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err } var reader io.ReadCloser - err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &reader) + err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, nil, &reader) // Return early on success. if err == nil { diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go index 8d5148edd..1d514f13b 100644 --- a/libs/filer/workspace_files_client.go +++ b/libs/filer/workspace_files_client.go @@ -106,7 +106,7 @@ func (info *wsfsFileInfo) MarshalJSON() ([]byte, error) { // as an interface to allow for mocking in tests. 
type apiClient interface { Do(ctx context.Context, method, path string, - headers map[string]string, request, response any, + headers map[string]string, queryString map[string]any, request, response any, visitors ...func(*http.Request) error) error } @@ -156,7 +156,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io return err } - err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil) + err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, nil, body, nil) // Return early on success. if err == nil { @@ -341,6 +341,7 @@ func (w *WorkspaceFilesClient) Stat(ctx context.Context, name string) (fs.FileIn http.MethodGet, "/api/2.0/workspace/get-status", nil, + nil, map[string]string{ "path": absPath, "return_export_info": "true", diff --git a/libs/filer/workspace_files_extensions_client_test.go b/libs/filer/workspace_files_extensions_client_test.go index 9ea837fa9..e9fde4762 100644 --- a/libs/filer/workspace_files_extensions_client_test.go +++ b/libs/filer/workspace_files_extensions_client_test.go @@ -17,7 +17,7 @@ type mockApiClient struct { } func (m *mockApiClient) Do(ctx context.Context, method, path string, - headers map[string]string, request, response any, + headers map[string]string, queryString map[string]any, request, response any, visitors ...func(*http.Request) error, ) error { args := m.Called(ctx, method, path, headers, request, response, visitors) diff --git a/libs/git/info.go b/libs/git/info.go index 46e57be48..dc4af9b6d 100644 --- a/libs/git/info.go +++ b/libs/git/info.go @@ -66,6 +66,7 @@ func fetchRepositoryInfoAPI(ctx context.Context, path string, w *databricks.Work http.MethodGet, apiEndpoint, nil, + nil, map[string]string{ "path": path, "return_git_info": "true", From 65fbbd9a7c75a2404fa3d4956560ab037535d779 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 14:22:08 +0100 Subject: [PATCH 14/25] libs/python: Remove DetectInterpreters (#2234) ## Changes - Remove DetectInterpreters from the DetectExecutable call: python3 or python should always be on the PATH. We don't need to detect non-standard situations like python3.10 being present while python3 is not. - Moved DetectInterpreters to cmd/labs, where it is still used. This is a follow-up to https://github.com/databricks/cli/pull/2034 ## Tests Existing tests.
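For illustration, a minimal sketch (not part of the patch) of what the simplified detection reduces to: resolve the interpreter from PATH and fail if it is absent, instead of scanning PATH for alternatives such as python3.10. The executable name below is an assumption for the example; the CLI takes the actual name from GetExecutable().

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Look up the interpreter on PATH; "python3" is assumed here.
	path, err := exec.LookPath("python3")
	if err != nil {
		fmt.Println("no python3 on PATH:", err)
		return
	}
	fmt.Println("using interpreter:", path)
}
```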
--- cmd/labs/project/installer.go | 3 +-- .../labs/project}/interpreters.go | 2 +- .../labs/project}/interpreters_unix_test.go | 2 +- .../labs/project}/interpreters_win_test.go | 2 +- .../testdata/other-binaries-filtered/python | 0 .../other-binaries-filtered/python3-whatever | 0 .../other-binaries-filtered/python3.10 | 0 .../other-binaries-filtered/python3.10.100 | 0 .../other-binaries-filtered/python3.11 | 0 .../other-binaries-filtered/python4.8 | 0 .../testdata/other-binaries-filtered/python5 | 0 .../testdata/other-binaries-filtered/python6 | 0 .../testdata/other-binaries-filtered/python7 | 0 .../testdata/other-binaries-filtered/pythonw | 0 .../other-binaries-filtered/real-python3.11.4 | 0 .../testdata/other-binaries-filtered/whatever | 0 .../testdata/world-writeable/python8.4 | 0 libs/python/detect.go | 22 +------------------ libs/python/detect_unix_test.go | 12 ++-------- libs/python/detect_win_test.go | 2 +- 20 files changed, 8 insertions(+), 37 deletions(-) rename {libs/python => cmd/labs/project}/interpreters.go (99%) rename {libs/python => cmd/labs/project}/interpreters_unix_test.go (99%) rename {libs/python => cmd/labs/project}/interpreters_win_test.go (97%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3-whatever (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.10 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.10.100 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python3.11 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python4.8 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python5 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python6 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/python7 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/pythonw (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/real-python3.11.4 (100%) rename {libs/python => cmd/labs/project}/testdata/other-binaries-filtered/whatever (100%) rename {libs/python => cmd/labs/project}/testdata/world-writeable/python8.4 (100%) diff --git a/cmd/labs/project/installer.go b/cmd/labs/project/installer.go index 7d31623bb..05f7d68aa 100644 --- a/cmd/labs/project/installer.go +++ b/cmd/labs/project/installer.go @@ -15,7 +15,6 @@ import ( "github.com/databricks/cli/libs/databrickscfg/profile" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/process" - "github.com/databricks/cli/libs/python" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/sql" @@ -223,7 +222,7 @@ func (i *installer) setupPythonVirtualEnvironment(ctx context.Context, w *databr feedback := cmdio.Spinner(ctx) defer close(feedback) feedback <- "Detecting all installed Python interpreters on the system" - pythonInterpreters, err := python.DetectInterpreters(ctx) + pythonInterpreters, err := DetectInterpreters(ctx) if err != nil { return fmt.Errorf("detect: %w", err) } diff --git a/libs/python/interpreters.go b/cmd/labs/project/interpreters.go similarity index 99% rename from libs/python/interpreters.go rename to cmd/labs/project/interpreters.go index 6071309a8..00f099ed4 100644 --- a/libs/python/interpreters.go +++ 
b/cmd/labs/project/interpreters.go @@ -1,4 +1,4 @@ -package python +package project import ( "context" diff --git a/libs/python/interpreters_unix_test.go b/cmd/labs/project/interpreters_unix_test.go similarity index 99% rename from libs/python/interpreters_unix_test.go rename to cmd/labs/project/interpreters_unix_test.go index 57adc9279..a5bbb6468 100644 --- a/libs/python/interpreters_unix_test.go +++ b/cmd/labs/project/interpreters_unix_test.go @@ -1,6 +1,6 @@ //go:build unix -package python +package project import ( "context" diff --git a/libs/python/interpreters_win_test.go b/cmd/labs/project/interpreters_win_test.go similarity index 97% rename from libs/python/interpreters_win_test.go rename to cmd/labs/project/interpreters_win_test.go index f99981529..2316daa30 100644 --- a/libs/python/interpreters_win_test.go +++ b/cmd/labs/project/interpreters_win_test.go @@ -1,6 +1,6 @@ //go:build windows -package python +package project import ( "context" diff --git a/libs/python/testdata/other-binaries-filtered/python b/cmd/labs/project/testdata/other-binaries-filtered/python similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python rename to cmd/labs/project/testdata/other-binaries-filtered/python diff --git a/libs/python/testdata/other-binaries-filtered/python3-whatever b/cmd/labs/project/testdata/other-binaries-filtered/python3-whatever similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python3-whatever rename to cmd/labs/project/testdata/other-binaries-filtered/python3-whatever diff --git a/libs/python/testdata/other-binaries-filtered/python3.10 b/cmd/labs/project/testdata/other-binaries-filtered/python3.10 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python3.10 rename to cmd/labs/project/testdata/other-binaries-filtered/python3.10 diff --git a/libs/python/testdata/other-binaries-filtered/python3.10.100 b/cmd/labs/project/testdata/other-binaries-filtered/python3.10.100 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python3.10.100 rename to cmd/labs/project/testdata/other-binaries-filtered/python3.10.100 diff --git a/libs/python/testdata/other-binaries-filtered/python3.11 b/cmd/labs/project/testdata/other-binaries-filtered/python3.11 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python3.11 rename to cmd/labs/project/testdata/other-binaries-filtered/python3.11 diff --git a/libs/python/testdata/other-binaries-filtered/python4.8 b/cmd/labs/project/testdata/other-binaries-filtered/python4.8 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python4.8 rename to cmd/labs/project/testdata/other-binaries-filtered/python4.8 diff --git a/libs/python/testdata/other-binaries-filtered/python5 b/cmd/labs/project/testdata/other-binaries-filtered/python5 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python5 rename to cmd/labs/project/testdata/other-binaries-filtered/python5 diff --git a/libs/python/testdata/other-binaries-filtered/python6 b/cmd/labs/project/testdata/other-binaries-filtered/python6 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python6 rename to cmd/labs/project/testdata/other-binaries-filtered/python6 diff --git a/libs/python/testdata/other-binaries-filtered/python7 b/cmd/labs/project/testdata/other-binaries-filtered/python7 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/python7 rename to 
cmd/labs/project/testdata/other-binaries-filtered/python7 diff --git a/libs/python/testdata/other-binaries-filtered/pythonw b/cmd/labs/project/testdata/other-binaries-filtered/pythonw similarity index 100% rename from libs/python/testdata/other-binaries-filtered/pythonw rename to cmd/labs/project/testdata/other-binaries-filtered/pythonw diff --git a/libs/python/testdata/other-binaries-filtered/real-python3.11.4 b/cmd/labs/project/testdata/other-binaries-filtered/real-python3.11.4 similarity index 100% rename from libs/python/testdata/other-binaries-filtered/real-python3.11.4 rename to cmd/labs/project/testdata/other-binaries-filtered/real-python3.11.4 diff --git a/libs/python/testdata/other-binaries-filtered/whatever b/cmd/labs/project/testdata/other-binaries-filtered/whatever similarity index 100% rename from libs/python/testdata/other-binaries-filtered/whatever rename to cmd/labs/project/testdata/other-binaries-filtered/whatever diff --git a/libs/python/testdata/world-writeable/python8.4 b/cmd/labs/project/testdata/world-writeable/python8.4 similarity index 100% rename from libs/python/testdata/world-writeable/python8.4 rename to cmd/labs/project/testdata/world-writeable/python8.4 diff --git a/libs/python/detect.go b/libs/python/detect.go index e86d9d621..75158da65 100644 --- a/libs/python/detect.go +++ b/libs/python/detect.go @@ -39,27 +39,7 @@ func DetectExecutable(ctx context.Context) (string, error) { // // See https://github.com/pyenv/pyenv#understanding-python-version-selection - out, err := exec.LookPath(GetExecutable()) - - // most of the OS'es have python3 in $PATH, but for those which don't, - // we perform the latest version lookup - if err != nil && !errors.Is(err, exec.ErrNotFound) { - return "", err - } - if out != "" { - return out, nil - } - // otherwise, detect all interpreters and pick the least that satisfies - // minimal version requirements - all, err := DetectInterpreters(ctx) - if err != nil { - return "", err - } - interpreter, err := all.AtLeast("3.8") - if err != nil { - return "", err - } - return interpreter.Path, nil + return exec.LookPath(GetExecutable()) } // DetectVEnvExecutable returns the path to the python3 executable inside venvPath, diff --git a/libs/python/detect_unix_test.go b/libs/python/detect_unix_test.go index a962e1f55..1774aa108 100644 --- a/libs/python/detect_unix_test.go +++ b/libs/python/detect_unix_test.go @@ -16,24 +16,16 @@ func TestDetectsViaPathLookup(t *testing.T) { assert.NotEmpty(t, py) } -func TestDetectsViaListing(t *testing.T) { - t.Setenv("PATH", "testdata/other-binaries-filtered") - ctx := context.Background() - py, err := DetectExecutable(ctx) - assert.NoError(t, err) - assert.Equal(t, "testdata/other-binaries-filtered/python3.10", py) -} - func TestDetectFailsNoInterpreters(t *testing.T) { t.Setenv("PATH", "testdata") ctx := context.Background() _, err := DetectExecutable(ctx) - assert.Equal(t, ErrNoPythonInterpreters, err) + assert.Error(t, err) } func TestDetectFailsNoMinimalVersion(t *testing.T) { t.Setenv("PATH", "testdata/no-python3") ctx := context.Background() _, err := DetectExecutable(ctx) - assert.EqualError(t, err, "cannot find Python greater or equal to v3.8.0") + assert.Error(t, err) } diff --git a/libs/python/detect_win_test.go b/libs/python/detect_win_test.go index 2ef811a4b..7b2ee281e 100644 --- a/libs/python/detect_win_test.go +++ b/libs/python/detect_win_test.go @@ -20,5 +20,5 @@ func TestDetectFailsNoInterpreters(t *testing.T) { t.Setenv("PATH", "testdata") ctx := context.Background() _, err := 
DetectExecutable(ctx) - assert.ErrorIs(t, err, ErrNoPythonInterpreters) + assert.Error(t, err) } From 52bf7e388a80beb95d248dc623cfda3cf5d5e137 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 15:25:56 +0100 Subject: [PATCH 15/25] acc: Propagate user's UV_CACHE_DIR to tests (#2239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit There is a speed up in 0.5s but it is still 4.4s, so something else is slow there. Benchmarking bundle/templates/experimental-jobs-as-code: ``` # Without UV_CACHE_DIR ~/work/cli/acceptance/bundle/templates/experimental-jobs-as-code % hyperfine --warmup 2 'testme -count=1' Benchmark 1: testme -count=1 Time (mean ± σ): 4.950 s ± 0.079 s [User: 2.730 s, System: 8.524 s] Range (min … max): 4.838 s … 5.076 s 10 runs # With UV_CACHE_DIR ~/work/cli/acceptance/bundle/templates/experimental-jobs-as-code % hyperfine --warmup 2 'testme -count=1' Benchmark 1: testme -count=1 Time (mean ± σ): 4.410 s ± 0.049 s [User: 2.669 s, System: 8.710 s] Range (min … max): 4.324 s … 4.467 s 10 runs ``` --- acceptance/acceptance_test.go | 17 +++++++++++++++++ .../templates/experimental-jobs-as-code/script | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index e48bd9908..47295b47a 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -100,6 +100,10 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { // Prevent CLI from downloading terraform in each test: t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir) + // Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically + uvCache := getUVDefaultCacheDir(t) + t.Setenv("UV_CACHE_DIR", uvCache) + ctx := context.Background() cloudEnv := os.Getenv("CLOUD_ENV") @@ -486,3 +490,16 @@ func ListDir(t *testing.T, src string) []string { } return files } + +func getUVDefaultCacheDir(t *testing.T) string { + // According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration + // the default cache directory is + // "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows" + cacheDir, err := os.UserCacheDir() + require.NoError(t, err) + if runtime.GOOS == "windows" { + return cacheDir + "\\uv\\cache" + } else { + return cacheDir + "/uv" + } +} diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/script b/acceptance/bundle/templates/experimental-jobs-as-code/script index af28b9d0a..0223b3326 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/script +++ b/acceptance/bundle/templates/experimental-jobs-as-code/script @@ -3,7 +3,7 @@ trace $CLI bundle init experimental-jobs-as-code --config-file ./input.json --ou cd output/my_jobs_as_code # silence uv output because it's non-deterministic -uv sync 2> /dev/null +uv sync -q # remove version constraint because it always creates a warning on dev builds cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new From 67d1413db5b84df6643f3c1571abae13da14c6e2 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 15:34:53 +0100 Subject: [PATCH 16/25] Add default regex for DEV_VERSION (#2241) ## Changes - Replace development version with $DEV_VERSION - Update experimental-jobs-as-code to make use of it. ## Tests - Existing tests. 
- Using this in https://github.com/databricks/cli/pull/2213 --- acceptance/acceptance_test.go | 1 + .../bundle/templates/experimental-jobs-as-code/output.txt | 2 ++ .../output/my_jobs_as_code/databricks.yml | 1 + .../bundle/templates/experimental-jobs-as-code/script | 4 ---- acceptance/selftest/output.txt | 4 ++++ acceptance/selftest/script | 3 +++ libs/testdiff/replacement.go | 7 +++++++ 7 files changed, 18 insertions(+), 4 deletions(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 47295b47a..5eb08f674 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -128,6 +128,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { testdiff.PrepareReplacementsUser(t, &repls, *user) testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient) testdiff.PrepareReplacementsUUID(t, &repls) + testdiff.PrepareReplacementsDevVersion(t, &repls) testDirs := getTests(t) require.NotEmpty(t, testDirs) diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt index 1aa8a94d5..10aca003e 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt +++ b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt @@ -10,6 +10,8 @@ Please refer to the README.md file for "getting started" instructions. See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html. >>> $CLI bundle validate -t dev --output json +Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION + { "jobs": { "my_jobs_as_code_job": { diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml index fd87aa381..a1a93d95c 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml +++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml @@ -3,6 +3,7 @@ bundle: name: my_jobs_as_code uuid: + databricks_cli_version: ">= 0.238.0" experimental: python: diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/script b/acceptance/bundle/templates/experimental-jobs-as-code/script index 0223b3326..10188aabd 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/script +++ b/acceptance/bundle/templates/experimental-jobs-as-code/script @@ -5,10 +5,6 @@ cd output/my_jobs_as_code # silence uv output because it's non-deterministic uv sync -q -# remove version constraint because it always creates a warning on dev builds -cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new -mv databricks.yml.new databricks.yml - trace $CLI bundle validate -t dev --output json | jq ".resources" rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info diff --git a/acceptance/selftest/output.txt b/acceptance/selftest/output.txt index 9fdfbc1e7..91aa8c33e 100644 --- a/acceptance/selftest/output.txt +++ b/acceptance/selftest/output.txt @@ -33,3 +33,7 @@ $TMPDIR/subdir/a/b/c 1234 CUSTOM_NUMBER_REGEX 123456 + +=== Testing --version +>>> $CLI --version +Databricks CLI v$DEV_VERSION diff --git a/acceptance/selftest/script b/acceptance/selftest/script index 665726167..bccf30e71 100644 --- a/acceptance/selftest/script +++ b/acceptance/selftest/script @@ -24,3 +24,6 @@ printf "\n=== Custom regex can be specified in [[Repl]] 
section\n" echo 1234 echo 12345 echo 123456 + +printf "\n=== Testing --version" +trace $CLI --version diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go index b512374a3..40e7e72b4 100644 --- a/libs/testdiff/replacement.go +++ b/libs/testdiff/replacement.go @@ -23,6 +23,8 @@ var ( uuidRegex = regexp.MustCompile(`[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}`) numIdRegex = regexp.MustCompile(`[0-9]{3,}`) privatePathRegex = regexp.MustCompile(`(/tmp|/private)(/.*)/([a-zA-Z0-9]+)`) + // Version could v0.0.0-dev+21e1aacf518a or just v0.0.0-dev (the latter is currently the case on Windows) + devVersionRegex = regexp.MustCompile(`0\.0\.0-dev(\+[a-f0-9]{10,16})?`) ) type Replacement struct { @@ -211,3 +213,8 @@ func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsC t.Helper() r.append(privatePathRegex, "/tmp/.../$3") } + +func PrepareReplacementsDevVersion(t testutil.TestingT, r *ReplacementsContext) { + t.Helper() + r.append(devVersionRegex, "$$DEV_VERSION") +} From be908ee1a17abe36c573a24ac83033243c154379 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Mon, 27 Jan 2025 16:28:33 +0100 Subject: [PATCH 17/25] Add acceptance test for 'experimental.scripts' (#2240) --- acceptance/bundle/scripts/databricks.yml | 11 +++++ acceptance/bundle/scripts/myscript.py | 8 ++++ acceptance/bundle/scripts/output.txt | 52 ++++++++++++++++++++++++ acceptance/bundle/scripts/script | 3 ++ acceptance/server_test.go | 4 ++ bundle/scripts/scripts_test.go | 51 ----------------------- 6 files changed, 78 insertions(+), 51 deletions(-) create mode 100644 acceptance/bundle/scripts/databricks.yml create mode 100644 acceptance/bundle/scripts/myscript.py create mode 100644 acceptance/bundle/scripts/output.txt create mode 100644 acceptance/bundle/scripts/script delete mode 100644 bundle/scripts/scripts_test.go diff --git a/acceptance/bundle/scripts/databricks.yml b/acceptance/bundle/scripts/databricks.yml new file mode 100644 index 000000000..6421e2b59 --- /dev/null +++ b/acceptance/bundle/scripts/databricks.yml @@ -0,0 +1,11 @@ +bundle: + name: scripts + +experimental: + scripts: + preinit: "python3 ./myscript.py $EXITCODE preinit" + postinit: "python3 ./myscript.py 0 postinit" + prebuild: "python3 ./myscript.py 0 prebuild" + postbuild: "python3 ./myscript.py 0 postbuild" + predeploy: "python3 ./myscript.py 0 predeploy" + postdeploy: "python3 ./myscript.py 0 postdeploy" diff --git a/acceptance/bundle/scripts/myscript.py b/acceptance/bundle/scripts/myscript.py new file mode 100644 index 000000000..d10f497e1 --- /dev/null +++ b/acceptance/bundle/scripts/myscript.py @@ -0,0 +1,8 @@ +import sys + +info = " ".join(sys.argv[1:]) +sys.stderr.write(f"from myscript.py {info}: hello stderr!\n") +sys.stdout.write(f"from myscript.py {info}: hello stdout!\n") + +exitcode = int(sys.argv[1]) +sys.exit(exitcode) diff --git a/acceptance/bundle/scripts/output.txt b/acceptance/bundle/scripts/output.txt new file mode 100644 index 000000000..ec5978380 --- /dev/null +++ b/acceptance/bundle/scripts/output.txt @@ -0,0 +1,52 @@ + +>>> EXITCODE=0 errcode $CLI bundle validate +Executing 'preinit' script +from myscript.py 0 preinit: hello stdout! +from myscript.py 0 preinit: hello stderr! +Executing 'postinit' script +from myscript.py 0 postinit: hello stdout! +from myscript.py 0 postinit: hello stderr! +Name: scripts +Target: default +Workspace: + User: $USERNAME + Path: /Workspace/Users/$USERNAME/.bundle/scripts/default + +Validation OK! 
+ +>>> EXITCODE=1 errcode $CLI bundle validate +Executing 'preinit' script +from myscript.py 1 preinit: hello stdout! +from myscript.py 1 preinit: hello stderr! +Error: failed to execute script: exit status 1 + +Name: scripts + +Found 1 error + +Exit code: 1 + +>>> EXITCODE=0 errcode $CLI bundle deploy +Executing 'preinit' script +from myscript.py 0 preinit: hello stdout! +from myscript.py 0 preinit: hello stderr! +Executing 'postinit' script +from myscript.py 0 postinit: hello stdout! +from myscript.py 0 postinit: hello stderr! +Executing 'prebuild' script +from myscript.py 0 prebuild: hello stdout! +from myscript.py 0 prebuild: hello stderr! +Executing 'postbuild' script +from myscript.py 0 postbuild: hello stdout! +from myscript.py 0 postbuild: hello stderr! +Executing 'predeploy' script +from myscript.py 0 predeploy: hello stdout! +from myscript.py 0 predeploy: hello stderr! +Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME. +Please make sure the current user or one of their groups is listed under the permissions of this bundle. +For assistance, contact the owners of this project. +They may need to redeploy the bundle to apply the new permissions. +Please refer to https://docs.databricks.com/dev-tools/bundles/permissions.html for more on managing permissions. + + +Exit code: 1 diff --git a/acceptance/bundle/scripts/script b/acceptance/bundle/scripts/script new file mode 100644 index 000000000..de07d277e --- /dev/null +++ b/acceptance/bundle/scripts/script @@ -0,0 +1,3 @@ +trace EXITCODE=0 errcode $CLI bundle validate +trace EXITCODE=1 errcode $CLI bundle validate +trace EXITCODE=0 errcode $CLI bundle deploy diff --git a/acceptance/server_test.go b/acceptance/server_test.go index eb8cbb24a..dbc55c03f 100644 --- a/acceptance/server_test.go +++ b/acceptance/server_test.go @@ -146,4 +146,8 @@ func AddHandlers(server *TestServer) { }, }, nil }) + + server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, error) { + return "{}", nil + }) } diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go deleted file mode 100644 index 0c92bc2c3..000000000 --- a/bundle/scripts/scripts_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package scripts - -import ( - "bufio" - "context" - "strings" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/libs/exec" - "github.com/stretchr/testify/require" -) - -func TestExecutesHook(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Experimental: &config.Experimental{ - Scripts: map[config.ScriptHook]config.Command{ - config.ScriptPreBuild: "echo 'Hello'", - }, - }, - }, - } - - executor, err := exec.NewCommandExecutor(b.BundleRootPath) - require.NoError(t, err) - _, out, err := executeHook(context.Background(), executor, b, config.ScriptPreBuild) - require.NoError(t, err) - - reader := bufio.NewReader(out) - line, err := reader.ReadString('\n') - - require.NoError(t, err) - require.Equal(t, "Hello", strings.TrimSpace(line)) -} - -func TestExecuteMutator(t *testing.T) { - b := &bundle.Bundle{ - Config: config.Root{ - Experimental: &config.Experimental{ - Scripts: map[config.ScriptHook]config.Command{ - config.ScriptPreBuild: "echo 'Hello'", - }, - }, - }, - } - - diags := bundle.Apply(context.Background(), b, Execute(config.ScriptPreInit)) - require.NoError(t, diags.Error()) -} From 60709e3d48a711b931d341196120f4450ee78499 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 11:15:32 
+0100 Subject: [PATCH 18/25] acc: Restore unexpected output error (#2243) ## Changes Restore original behaviour of acceptance tests: any unaccounted for files trigger an error (not just those that start with "out"). This got changed in https://github.com/databricks/cli/pull/2146/files#diff-2bb968d823f4afb825e1dcea2879bdbdedf2b7c15d4e77f47905691b14246a04L196 which started only checking files starting with "out*" and skipping everything else. ## Tests Existing tests. --- acceptance/acceptance_test.go | 1 + acceptance/bundle/git-permerror/script | 3 ++- acceptance/bundle/syncroot/dotdot-git/script | 4 +++- acceptance/bundle/syncroot/dotdot-git/test.toml | 3 +++ 4 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 acceptance/bundle/syncroot/dotdot-git/test.toml diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 5eb08f674..2d67fb269 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -245,6 +245,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont if _, ok := outputs[relPath]; ok { continue } + t.Errorf("Unexpected output: %s", relPath) if strings.HasPrefix(relPath, "out") { // We have a new file starting with "out" // Show the contents & support overwrite mode for it: diff --git a/acceptance/bundle/git-permerror/script b/acceptance/bundle/git-permerror/script index 782cbf5bc..3a9b4db24 100644 --- a/acceptance/bundle/git-permerror/script +++ b/acceptance/bundle/git-permerror/script @@ -22,4 +22,5 @@ trace chmod 000 .git/config errcode trace $CLI bundle validate -o json | jq .bundle.git errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git -rm -fr .git +cd .. +rm -fr myrepo diff --git a/acceptance/bundle/syncroot/dotdot-git/script b/acceptance/bundle/syncroot/dotdot-git/script index 0706a1d5e..278e77101 100644 --- a/acceptance/bundle/syncroot/dotdot-git/script +++ b/acceptance/bundle/syncroot/dotdot-git/script @@ -3,4 +3,6 @@ mkdir myrepo cd myrepo cp ../databricks.yml . git-repo-init -$CLI bundle validate | sed 's/\\\\/\//g' +errcode $CLI bundle validate +cd .. +rm -fr myrepo diff --git a/acceptance/bundle/syncroot/dotdot-git/test.toml b/acceptance/bundle/syncroot/dotdot-git/test.toml new file mode 100644 index 000000000..f57f83ee4 --- /dev/null +++ b/acceptance/bundle/syncroot/dotdot-git/test.toml @@ -0,0 +1,3 @@ +[[Repls]] +Old = '\\\\myrepo' +New = '/myrepo' From 11436faafe5361bd390fa04dc699807e31db6144 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 11:22:29 +0100 Subject: [PATCH 19/25] acc: Avoid reading and applying replacements on large files; validate utf8 (#2244) ## Changes - Do not start replacement / comparison if file is too large or not valid utf-8. - This helps to prevent replacements if there is accidentally a large binary (e.g. terraform). ## Tests Found this problem when working on https://github.com/databricks/cli/pull/2242 -- the tests tried to applied replacements on terraform binary and crashed. With this change, an error is reported instead. 
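
The guard described above boils down to: stat the file, skip anything over a size limit, then skip anything that is not valid UTF-8. A minimal standalone sketch of that idea, assuming an illustrative `safeToRead` helper, size constant, and `main` usage (the actual change lands in `tryReading` in the diff below):

```
package main

import (
	"fmt"
	"os"
	"unicode/utf8"
)

// Illustrative size limit; the test suite uses a constant of the same order of magnitude.
const maxFileSize = 100_000

// safeToRead refuses to return file contents for replacement/comparison when
// the file is missing, too large, or not valid UTF-8 (e.g. a stray binary).
func safeToRead(path string) (string, bool) {
	info, err := os.Stat(path)
	if err != nil || info.Size() > maxFileSize {
		return "", false
	}
	data, err := os.ReadFile(path)
	if err != nil || !utf8.Valid(data) {
		return "", false
	}
	return string(data), true
}

func main() {
	if content, ok := safeToRead("output.txt"); ok {
		fmt.Println(len(content), "bytes are safe to diff")
	} else {
		fmt.Println("skipping: missing, too large, or not valid UTF-8")
	}
}
```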
--- acceptance/acceptance_test.go | 51 ++++++++++++++++++++++++----------- 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 2d67fb269..877c7239d 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -15,6 +15,7 @@ import ( "strings" "testing" "time" + "unicode/utf8" "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/env" @@ -44,6 +45,7 @@ const ( EntryPointScript = "script" CleanupScript = "script.cleanup" PrepareScript = "script.prepare" + MaxFileSize = 100_000 ) var Scripts = map[string]bool{ @@ -257,15 +259,15 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) { pathRef := filepath.Join(dirRef, relPath) pathNew := filepath.Join(dirNew, relPath) - bufRef, okRef := readIfExists(t, pathRef) - bufNew, okNew := readIfExists(t, pathNew) + bufRef, okRef := tryReading(t, pathRef) + bufNew, okNew := tryReading(t, pathNew) if !okRef && !okNew { - t.Errorf("Both files are missing: %s, %s", pathRef, pathNew) + t.Errorf("Both files are missing or have errors: %s, %s", pathRef, pathNew) return } - valueRef := testdiff.NormalizeNewlines(string(bufRef)) - valueNew := testdiff.NormalizeNewlines(string(bufNew)) + valueRef := testdiff.NormalizeNewlines(bufRef) + valueNew := testdiff.NormalizeNewlines(bufNew) // Apply replacements to the new value only. // The reference value is stored after applying replacements. @@ -323,14 +325,14 @@ func readMergedScriptContents(t *testing.T, dir string) string { cleanups := []string{} for { - x, ok := readIfExists(t, filepath.Join(dir, CleanupScript)) + x, ok := tryReading(t, filepath.Join(dir, CleanupScript)) if ok { - cleanups = append(cleanups, string(x)) + cleanups = append(cleanups, x) } - x, ok = readIfExists(t, filepath.Join(dir, PrepareScript)) + x, ok = tryReading(t, filepath.Join(dir, PrepareScript)) if ok { - prepares = append(prepares, string(x)) + prepares = append(prepares, x) } if dir == "" || dir == "." { @@ -417,16 +419,33 @@ func formatOutput(w io.Writer, err error) { } } -func readIfExists(t *testing.T, path string) ([]byte, bool) { - data, err := os.ReadFile(path) - if err == nil { - return data, true +func tryReading(t *testing.T, path string) (string, bool) { + info, err := os.Stat(path) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + t.Errorf("%s: %s", path, err) + } + return "", false } - if !errors.Is(err, os.ErrNotExist) { - t.Fatalf("%s: %s", path, err) + if info.Size() > MaxFileSize { + t.Errorf("%s: ignoring, too large: %d", path, info.Size()) + return "", false } - return []byte{}, false + + data, err := os.ReadFile(path) + if err != nil { + // already checked ErrNotExist above + t.Errorf("%s: %s", path, err) + return "", false + } + + if !utf8.Valid(data) { + t.Errorf("%s: not valid utf-8", path) + return "", false + } + + return string(data), true } func CopyDir(src, dst string, inputs, outputs map[string]bool) error { From 3ffac800071a397763bddb49e22c1aca4f55573c Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 11:23:44 +0100 Subject: [PATCH 20/25] acc: Use real terraform when CLOUD_ENV is set (#2245) ## Changes - If CLOUD_ENV is set to do not override with dummy value. This allows running acceptance tests as integration tests. 
- Needed for https://github.com/databricks/cli/pull/2242 ## Tests Manually run the test suite against dogfood. `CLOUD_ENV=aws go test ./acceptance` --- acceptance/acceptance_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 877c7239d..b4b27f201 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -99,9 +99,6 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { repls.SetPath(tempHomeDir, "$TMPHOME") t.Logf("$TMPHOME=%v", tempHomeDir) - // Prevent CLI from downloading terraform in each test: - t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir) - // Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically uvCache := getUVDefaultCacheDir(t) t.Setenv("UV_CACHE_DIR", uvCache) @@ -119,6 +116,9 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int { homeDir := t.TempDir() // Do not read user's ~/.databrickscfg t.Setenv(env.HomeEnvVar(), homeDir) + + // Prevent CLI from downloading terraform in each test: + t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir) } workspaceClient, err := databricks.NewWorkspaceClient() From 65e4f79dfec84f45689ec3241da62ca3660112e6 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 28 Jan 2025 16:24:23 +0530 Subject: [PATCH 21/25] Switch to using `[` from `<` in text replacements (#2224) ## Changes Noticed this when working on https://github.com/databricks/cli/pull/2221. `<` is a special HTML character that is encoded during text replacement when using `AssertEqualTexts`. ## Tests N/A --- .../dbt-sql/output/my_dbt_sql/databricks.yml | 2 +- .../output/my_default_python/databricks.yml | 2 +- .../my_default_python/scratch/exploration.ipynb | 2 +- .../output/my_default_python/src/dlt_pipeline.ipynb | 6 +++--- .../output/my_default_python/src/notebook.ipynb | 4 ++-- .../default-sql/output/my_default_sql/databricks.yml | 2 +- .../output/my_default_sql/scratch/exploration.ipynb | 2 +- .../output/my_jobs_as_code/databricks.yml | 2 +- .../output/my_jobs_as_code/src/notebook.ipynb | 4 ++-- .../bundle/testdata/default_python/bundle_deploy.txt | 2 +- .../testdata/default_python/bundle_summary.txt | 12 ++++++------ libs/testdiff/replacement.go | 4 ++-- libs/testdiff/replacement_test.go | 4 ++-- 13 files changed, 24 insertions(+), 24 deletions(-) diff --git a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml index 1962bc543..cdf3704b9 100644 --- a/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml +++ b/acceptance/bundle/templates/dbt-sql/output/my_dbt_sql/databricks.yml @@ -3,7 +3,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. bundle: name: my_dbt_sql - uuid: + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml b/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml index 9deca9cf5..3fa777219 100644 --- a/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml +++ b/acceptance/bundle/templates/default-python/output/my_default_python/databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_python - uuid: + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb index 3b2fef4b4..a12773d4e 100644 --- a/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb +++ b/acceptance/bundle/templates/default-python/output/my_default_python/scratch/exploration.ipynb @@ -20,7 +20,7 @@ "rowLimit": 10000 }, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb index 36e993af7..8a02183e7 100644 --- a/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb +++ b/acceptance/bundle/templates/default-python/output/my_default_python/src/dlt_pipeline.ipynb @@ -6,7 +6,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } @@ -24,7 +24,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } @@ -47,7 +47,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } diff --git a/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb b/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb index 0d560443b..472ccb219 100644 --- a/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb +++ b/acceptance/bundle/templates/default-python/output/my_default_python/src/notebook.ipynb @@ -6,7 +6,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } @@ -37,7 +37,7 @@ "rowLimit": 10000 }, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } diff --git a/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml b/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml index ab857287e..16292bc84 100644 --- a/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml +++ b/acceptance/bundle/templates/default-sql/output/my_default_sql/databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. 
bundle: name: my_default_sql - uuid: + uuid: [UUID] include: - resources/*.yml diff --git a/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb b/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb index c3fd072e5..f3976c1de 100644 --- a/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb +++ b/acceptance/bundle/templates/default-sql/output/my_default_sql/scratch/exploration.ipynb @@ -7,7 +7,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml index a1a93d95c..54e69a256 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml +++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/databricks.yml @@ -2,7 +2,7 @@ # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. bundle: name: my_jobs_as_code - uuid: + uuid: [UUID] databricks_cli_version: ">= 0.238.0" experimental: diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb index 9bc3f1560..227c7cc55 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb +++ b/acceptance/bundle/templates/experimental-jobs-as-code/output/my_jobs_as_code/src/notebook.ipynb @@ -6,7 +6,7 @@ "application/vnd.databricks.v1+cell": { "cellMetadata": {}, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } @@ -37,7 +37,7 @@ "rowLimit": 10000 }, "inputWidgets": {}, - "nuid": "", + "nuid": "[UUID]", "showTitle": false, "title": "" } diff --git a/integration/bundle/testdata/default_python/bundle_deploy.txt b/integration/bundle/testdata/default_python/bundle_deploy.txt index eef0b79b3..d7b8cede9 100644 --- a/integration/bundle/testdata/default_python/bundle_deploy.txt +++ b/integration/bundle/testdata/default_python/bundle_deploy.txt @@ -1,5 +1,5 @@ Building project_name_$UNIQUE_PRJ... -Uploading project_name_$UNIQUE_PRJ-0.0.1+.-py3-none-any.whl... +Uploading project_name_$UNIQUE_PRJ-0.0.1+[NUMID].[NUMID]-py3-none-any.whl... Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files... Deploying resources... Updating deployment state... 
diff --git a/integration/bundle/testdata/default_python/bundle_summary.txt b/integration/bundle/testdata/default_python/bundle_summary.txt index 318cd2543..88ccdc496 100644 --- a/integration/bundle/testdata/default_python/bundle_summary.txt +++ b/integration/bundle/testdata/default_python/bundle_summary.txt @@ -16,7 +16,7 @@ "enabled": false } }, - "uuid": "" + "uuid": "[UUID]" }, "include": [ "resources/project_name_$UNIQUE_PRJ.job.yml", @@ -74,7 +74,7 @@ ] }, "format": "MULTI_TASK", - "id": "", + "id": "[NUMID]", "job_clusters": [ { "job_cluster_key": "job_cluster", @@ -141,7 +141,7 @@ "unit": "DAYS" } }, - "url": "$DATABRICKS_URL/jobs/?o=" + "url": "$DATABRICKS_URL/jobs/[NUMID]?o=[NUMID]" } }, "pipelines": { @@ -155,7 +155,7 @@ "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json" }, "development": true, - "id": "", + "id": "[UUID]", "libraries": [ { "notebook": { @@ -165,7 +165,7 @@ ], "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline", "target": "project_name_$UNIQUE_PRJ_dev", - "url": "$DATABRICKS_URL/pipelines/?o=" + "url": "$DATABRICKS_URL/pipelines/[UUID]?o=[NUMID]" } } }, @@ -183,4 +183,4 @@ "dev": "$USERNAME" } } -} \ No newline at end of file +} diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go index 40e7e72b4..ce5476a57 100644 --- a/libs/testdiff/replacement.go +++ b/libs/testdiff/replacement.go @@ -201,12 +201,12 @@ func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam. func PrepareReplacementsUUID(t testutil.TestingT, r *ReplacementsContext) { t.Helper() - r.append(uuidRegex, "") + r.append(uuidRegex, "[UUID]") } func PrepareReplacementsNumber(t testutil.TestingT, r *ReplacementsContext) { t.Helper() - r.append(numIdRegex, "") + r.append(numIdRegex, "[NUMID]") } func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsContext) { diff --git a/libs/testdiff/replacement_test.go b/libs/testdiff/replacement_test.go index de247c03e..1b6c5fe2d 100644 --- a/libs/testdiff/replacement_test.go +++ b/libs/testdiff/replacement_test.go @@ -25,7 +25,7 @@ func TestReplacement_UUID(t *testing.T) { PrepareReplacementsUUID(t, &repls) - assert.Equal(t, "", repls.Replace("123e4567-e89b-12d3-a456-426614174000")) + assert.Equal(t, "[UUID]", repls.Replace("123e4567-e89b-12d3-a456-426614174000")) } func TestReplacement_Number(t *testing.T) { @@ -34,7 +34,7 @@ func TestReplacement_Number(t *testing.T) { PrepareReplacementsNumber(t, &repls) assert.Equal(t, "12", repls.Replace("12")) - assert.Equal(t, "", repls.Replace("123")) + assert.Equal(t, "[NUMID]", repls.Replace("123")) } func TestReplacement_TemporaryDirectory(t *testing.T) { From 5971bd5c1ac0997a88f56dd4ccc88acf501e5267 Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 15:00:41 +0100 Subject: [PATCH 22/25] acc: Disable git hooks (#2249) Otherwise hooks from universe and custom hooks run in tests. 
--- acceptance/script.prepare | 1 + 1 file changed, 1 insertion(+) diff --git a/acceptance/script.prepare b/acceptance/script.prepare index b814a1260..ca47cdbff 100644 --- a/acceptance/script.prepare +++ b/acceptance/script.prepare @@ -39,6 +39,7 @@ git-repo-init() { git config core.autocrlf false git config user.name "Tester" git config user.email "tester@databricks.com" + git config core.hooksPath no-hooks git add databricks.yml git commit -qm 'Add databricks.yml' } From 025622540809702994aaefdb1e387a6552c00afa Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 15:12:47 +0100 Subject: [PATCH 23/25] acc: Exclude secrets from replacements (#2250) They should never be printed by CLI anyway. --- libs/testdiff/replacement.go | 9 --------- 1 file changed, 9 deletions(-) diff --git a/libs/testdiff/replacement.go b/libs/testdiff/replacement.go index ce5476a57..7077e611b 100644 --- a/libs/testdiff/replacement.go +++ b/libs/testdiff/replacement.go @@ -146,25 +146,16 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont r.Set(w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID") r.Set(w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID") r.Set(w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID") - r.Set(w.Config.MetadataServiceURL, "$DATABRICKS_METADATA_SERVICE_URL") r.Set(w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID") - r.Set(w.Config.Token, "$DATABRICKS_TOKEN") r.Set(w.Config.Username, "$DATABRICKS_USERNAME") - r.Set(w.Config.Password, "$DATABRICKS_PASSWORD") r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE") r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE") r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT") - r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS") r.Set(w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID") - r.Set(w.Config.AzureClientSecret, "$ARM_CLIENT_SECRET") - // r.Set(w.Config.AzureClientID, "$ARM_CLIENT_ID") r.Set(w.Config.AzureClientID, testerName) r.Set(w.Config.AzureTenantID, "$ARM_TENANT_ID") - r.Set(w.Config.ActionsIDTokenRequestURL, "$ACTIONS_ID_TOKEN_REQUEST_URL") - r.Set(w.Config.ActionsIDTokenRequestToken, "$ACTIONS_ID_TOKEN_REQUEST_TOKEN") r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT") r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID") - r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET") r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH") // This is set to words like "path" that happen too frequently // r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE") From 4ba222ab3632c45e488e88d3c54b6e05cbfe441b Mon Sep 17 00:00:00 2001 From: Denis Bilenko Date: Tue, 28 Jan 2025 15:22:56 +0100 Subject: [PATCH 24/25] Fix env_overrides not to use variables in workspace.profile (#2251) This does not work when this test is run against cloud. 
Needed for https://github.com/databricks/cli/pull/2242 --- acceptance/bundle/variables/env_overrides/databricks.yml | 7 ++++--- acceptance/bundle/variables/env_overrides/output.txt | 3 ++- acceptance/bundle/variables/env_overrides/script | 6 +++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/acceptance/bundle/variables/env_overrides/databricks.yml b/acceptance/bundle/variables/env_overrides/databricks.yml index 560513bc3..e5fc7fcc4 100644 --- a/acceptance/bundle/variables/env_overrides/databricks.yml +++ b/acceptance/bundle/variables/env_overrides/databricks.yml @@ -18,12 +18,13 @@ variables: description: variable with lookup lookup: cluster_policy: wrong-cluster-policy + + result: + default: ${var.a} ${var.b} + bundle: name: test bundle -workspace: - profile: ${var.a} ${var.b} - targets: env-with-single-variable-override: variables: diff --git a/acceptance/bundle/variables/env_overrides/output.txt b/acceptance/bundle/variables/env_overrides/output.txt index 1ee9ef625..06e6e518b 100644 --- a/acceptance/bundle/variables/env_overrides/output.txt +++ b/acceptance/bundle/variables/env_overrides/output.txt @@ -36,5 +36,6 @@ Exit code: 1 "b": "prod-b", "d": "4321", "e": "1234", - "f": "9876" + "f": "9876", + "result": "default-a prod-b" } diff --git a/acceptance/bundle/variables/env_overrides/script b/acceptance/bundle/variables/env_overrides/script index 30919fd8a..3965d1564 100644 --- a/acceptance/bundle/variables/env_overrides/script +++ b/acceptance/bundle/variables/env_overrides/script @@ -1,6 +1,6 @@ -trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .workspace.profile -trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile -trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile +trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .variables.result.value +trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value +trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value trace errcode $CLI bundle validate -t env-missing-a-required-variable-assignment trace errcode $CLI bundle validate -t env-using-an-undefined-variable trace $CLI bundle validate -t env-overrides-lookup -o json | jq '.variables | map_values(.value)' From 099e9bed0f2250e3dcece80e6e64d8873c75e74d Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 28 Jan 2025 15:34:44 +0100 Subject: [PATCH 25/25] Upgrade TF provider to 1.64.1 (#2247) ## Changes - Added support for `no_compute` in Apps - Added support for `run_as_repl` for job tasks --- bundle/internal/tf/codegen/schema/version.go | 2 +- .../internal/tf/schema/data_source_serving_endpoints.go | 8 ++++---- bundle/internal/tf/schema/resource_app.go | 1 + bundle/internal/tf/schema/resource_job.go | 2 ++ bundle/internal/tf/schema/resource_model_serving.go | 8 ++++---- bundle/internal/tf/schema/resource_recipient.go | 1 + bundle/internal/tf/schema/root.go | 2 +- 7 files changed, 14 insertions(+), 10 deletions(-) diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go index 677b8fc10..393afd6ed 100644 --- a/bundle/internal/tf/codegen/schema/version.go +++ b/bundle/internal/tf/codegen/schema/version.go @@ -1,3 +1,3 @@ package schema -const ProviderVersion = "1.63.0" +const ProviderVersion = "1.64.1" diff --git 
a/bundle/internal/tf/schema/data_source_serving_endpoints.go b/bundle/internal/tf/schema/data_source_serving_endpoints.go index bdfd778e0..973989216 100644 --- a/bundle/internal/tf/schema/data_source_serving_endpoints.go +++ b/bundle/internal/tf/schema/data_source_serving_endpoints.go @@ -3,7 +3,7 @@ package schema type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii struct { - Behavior string `json:"behavior"` + Behavior string `json:"behavior,omitempty"` } type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct { @@ -14,7 +14,7 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct { } type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct { - Behavior string `json:"behavior"` + Behavior string `json:"behavior,omitempty"` } type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput struct { @@ -87,8 +87,8 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabri type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig struct { PrivateKey string `json:"private_key,omitempty"` PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"` - ProjectId string `json:"project_id,omitempty"` - Region string `json:"region,omitempty"` + ProjectId string `json:"project_id"` + Region string `json:"region"` } type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig struct { diff --git a/bundle/internal/tf/schema/resource_app.go b/bundle/internal/tf/schema/resource_app.go index 14c93b793..cbce5ab0e 100644 --- a/bundle/internal/tf/schema/resource_app.go +++ b/bundle/internal/tf/schema/resource_app.go @@ -91,6 +91,7 @@ type ResourceApp struct { DefaultSourceCodePath string `json:"default_source_code_path,omitempty"` Description string `json:"description,omitempty"` Name string `json:"name"` + NoCompute bool `json:"no_compute,omitempty"` PendingDeployment *ResourceAppPendingDeployment `json:"pending_deployment,omitempty"` Resources []ResourceAppResources `json:"resources,omitempty"` ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"` diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index 63c8aeb7b..da277b5c1 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -904,6 +904,7 @@ type ResourceJobTaskForEachTaskTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` Parameters []string `json:"parameters,omitempty"` + RunAsRepl bool `json:"run_as_repl,omitempty"` } type ResourceJobTaskForEachTaskTaskSparkPythonTask struct { @@ -1299,6 +1300,7 @@ type ResourceJobTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` Parameters []string `json:"parameters,omitempty"` + RunAsRepl bool `json:"run_as_repl,omitempty"` } type ResourceJobTaskSparkPythonTask struct { diff --git a/bundle/internal/tf/schema/resource_model_serving.go b/bundle/internal/tf/schema/resource_model_serving.go index 71cf8925d..2025de34c 100644 --- a/bundle/internal/tf/schema/resource_model_serving.go +++ b/bundle/internal/tf/schema/resource_model_serving.go @@ -3,7 +3,7 @@ package schema type ResourceModelServingAiGatewayGuardrailsInputPii struct { - Behavior string `json:"behavior"` + Behavior string `json:"behavior,omitempty"` } type ResourceModelServingAiGatewayGuardrailsInput struct { @@ -14,7 +14,7 @@ 
type ResourceModelServingAiGatewayGuardrailsInput struct { } type ResourceModelServingAiGatewayGuardrailsOutputPii struct { - Behavior string `json:"behavior"` + Behavior string `json:"behavior,omitempty"` } type ResourceModelServingAiGatewayGuardrailsOutput struct { @@ -94,8 +94,8 @@ type ResourceModelServingConfigServedEntitiesExternalModelDatabricksModelServing type ResourceModelServingConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig struct { PrivateKey string `json:"private_key,omitempty"` PrivateKeyPlaintext string `json:"private_key_plaintext,omitempty"` - ProjectId string `json:"project_id,omitempty"` - Region string `json:"region,omitempty"` + ProjectId string `json:"project_id"` + Region string `json:"region"` } type ResourceModelServingConfigServedEntitiesExternalModelOpenaiConfig struct { diff --git a/bundle/internal/tf/schema/resource_recipient.go b/bundle/internal/tf/schema/resource_recipient.go index 91de4df76..4c8f2c7e7 100644 --- a/bundle/internal/tf/schema/resource_recipient.go +++ b/bundle/internal/tf/schema/resource_recipient.go @@ -29,6 +29,7 @@ type ResourceRecipient struct { CreatedAt int `json:"created_at,omitempty"` CreatedBy string `json:"created_by,omitempty"` DataRecipientGlobalMetastoreId string `json:"data_recipient_global_metastore_id,omitempty"` + ExpirationTime int `json:"expiration_time,omitempty"` Id string `json:"id,omitempty"` MetastoreId string `json:"metastore_id,omitempty"` Name string `json:"name"` diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 7dd3f9210..2ac852355 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -21,7 +21,7 @@ type Root struct { const ProviderHost = "registry.terraform.io" const ProviderSource = "databricks/databricks" -const ProviderVersion = "1.63.0" +const ProviderVersion = "1.64.1" func NewRoot() *Root { return &Root{