Compare commits

..

6 Commits

Author SHA1 Message Date
Andrew Nester 913e10a037
Added support for Databricks Apps in DABs (#1928)
## Changes
Now it's possible to configure new `app` resource in bundle and point it
to the custom `source_code_path` location where Databricks App code is
defined.

On `databricks bundle deploy` DABs will create an app. All subsequent
`databricks bundle deploy` executions will update the existing app if
there are any updates.

On `databricks bundle run <my_app>` DABs will execute app deployment. If
the app is not started yet, it will start the app first.

### Bundle configuration

```
bundle:
  name: apps

variables:
  my_job_id:
    description: "ID of job to run app"
    lookup:
      job: "My Job"
  databricks_name:
    description: "Name for app user"
  additional_flags:
    description: "Additional flags to run command app"
    default: ""
  my_app_config:
    type: complex
    description: "Configuration for my Databricks App"
    default:
      command:
        - flask
        - --app
        - hello
        - run
        - ${var.additional_flags}
      env:
        - name: DATABRICKS_NAME
          value: ${var.databricks_name}

resources:
  apps:
    my_app:
      name: "anester-app" # required and has to be unique
      description: "My App"
      source_code_path: ./app # required and points to location of app code
      config: ${var.my_app_config}
      resources:
        - name: "my-job"
          description: "A job for app to be able to run"
          job:
            id: ${var.my_job_id}
            permission: "CAN_MANAGE_RUN"
      permissions:
        - user_name: "foo@bar.com"
          level: "CAN_VIEW"
        - service_principal_name: "my_sp"
          level: "CAN_MANAGE"

targets:
  dev:
    variables:
      databricks_name: "Andrew (from dev)"
      additional_flags: --debug
  
  prod:
    variables:
      databricks_name: "Andrew (from prod)"
```

### Execution
1. `databricks bundle deploy -t dev`
2. `databricks bundle run my_app -t dev`

**If app is started**
```
✓ Getting the status of the app my-app
✓ App is in RUNNING state
✓ Preparing source code for new app deployment.
✓ Deployment is pending
✓ Starting app with command: flask --app hello run --debug
✓ App started successfully
You can access the app at <app-url>
```

**If app is not started**
```
✓ Getting the status of the app my-app
✓ App is in UNAVAILABLE state
✓ Starting the app my-app
✓ App is starting...
....
✓ App is starting...
✓ App is started!
✓ Preparing source code for new app deployment.
✓ Downloading source code from /Workspace/Users/...
✓ Starting app with command: flask --app hello run --debug
✓ App started successfully
You can access the app at <app-url>
```

## Tests
Added unit and config tests + manual test.

```
--- PASS: TestAccDeployBundleWithApp (404.59s)
PASS
coverage: 36.8% of statements in ./...
ok      github.com/databricks/cli/internal/bundle       405.035s        coverage: 36.8% of statements in ./...
```
2025-01-13 16:43:48 +00:00
Denis Bilenko a6412e4334
Remove redundant lines from PrepareReplacementsUser (#2130)
They are not necessary because they are added below. Also, they will
cause a crash if u.Name is nil.
2025-01-13 16:12:03 +00:00
dependabot[bot] 8234604cad
Bump golang.org/x/term from 0.27.0 to 0.28.0 (#2078)
Bumps [golang.org/x/term](https://github.com/golang/term) from 0.27.0 to
0.28.0.
<details>
<summary>Commits</summary>
<ul>
<li><a
href="40b02d69cd"><code>40b02d6</code></a>
go.mod: update golang.org/x dependencies</li>
<li>See full diff in <a
href="https://github.com/golang/term/compare/v0.27.0...v0.28.0">compare
view</a></li>
</ul>
</details>
<br />


[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/term&package-manager=go_modules&previous-version=0.27.0&new-version=0.28.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-13 13:26:55 +00:00
dependabot[bot] f8ab384bfb
Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 (#2079)
Bumps
[github.com/hashicorp/hc-install](https://github.com/hashicorp/hc-install)
from 0.9.0 to 0.9.1.
<details>
<summary>Release notes</summary>
<p><em>Sourced from <a
href="https://github.com/hashicorp/hc-install/releases">github.com/hashicorp/hc-install's
releases</a>.</em></p>
<blockquote>
<h2>v0.9.1</h2>
<h2>What's Changed</h2>
<ul>
<li>build(deps): bump github.com/go-git/go-git/v5 from 5.12.0 to 5.13.0
by <a href="https://github.com/dependabot"><code>@​dependabot</code></a>
in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/268">hashicorp/hc-install#268</a></li>
<li>build(deps): bump github.com/ProtonMail/go-crypto from 1.1.0 to
1.1.2 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/261">hashicorp/hc-install#261</a></li>
<li>build(deps): bump github.com/ProtonMail/go-crypto from 1.1.0-alpha.2
to 1.1.0 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/259">hashicorp/hc-install#259</a></li>
<li>build(deps): bump github.com/ProtonMail/go-crypto from 1.1.2 to
1.1.3 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/263">hashicorp/hc-install#263</a></li>
<li>build(deps): bump golang.org/x/mod from 0.21.0 to 0.22.0 by <a
href="https://github.com/dependabot"><code>@​dependabot</code></a> in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/262">hashicorp/hc-install#262</a></li>
</ul>
<h2>New Contributors</h2>
<ul>
<li><a
href="https://github.com/imakewebthings"><code>@​imakewebthings</code></a>
made their first contribution in <a
href="https://redirect.github.com/hashicorp/hc-install/pull/252">hashicorp/hc-install#252</a></li>
</ul>
<p><strong>Full Changelog</strong>: <a
href="https://github.com/hashicorp/hc-install/compare/v0.9.0...v0.9.1">https://github.com/hashicorp/hc-install/compare/v0.9.0...v0.9.1</a></p>
</blockquote>
</details>
<details>
<summary>Commits</summary>
<ul>
<li><a
href="a9cdf85469"><code>a9cdf85</code></a>
Prepare for 0.9.1 release (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/269">#269</a>)</li>
<li><a
href="18d08ba3e4"><code>18d08ba</code></a>
build(deps): Bump workflows to latest trusted versions (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/266">#266</a>)</li>
<li><a
href="e716f0ac3e"><code>e716f0a</code></a>
build(deps): bump github.com/go-git/go-git/v5 from 5.12.0 to 5.13.0 (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/268">#268</a>)</li>
<li><a
href="cca0f6dd33"><code>cca0f6d</code></a>
ci: Report code coverage (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/264">#264</a>)</li>
<li><a
href="131f8ffdb0"><code>131f8ff</code></a>
build(deps): bump github.com/ProtonMail/go-crypto from 1.1.2 to 1.1.3
(<a
href="https://redirect.github.com/hashicorp/hc-install/issues/263">#263</a>)</li>
<li><a
href="2609a7830a"><code>2609a78</code></a>
build(deps): bump golang.org/x/mod from 0.21.0 to 0.22.0 (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/262">#262</a>)</li>
<li><a
href="b9043f8dd1"><code>b9043f8</code></a>
build(deps): bump github.com/ProtonMail/go-crypto from 1.1.0 to 1.1.2
(<a
href="https://redirect.github.com/hashicorp/hc-install/issues/261">#261</a>)</li>
<li><a
href="c1dc8ac751"><code>c1dc8ac</code></a>
build(deps): bump github.com/ProtonMail/go-crypto from 1.1.0-alpha.2 to
1.1.0...</li>
<li><a
href="8ed2e0f78e"><code>8ed2e0f</code></a>
build(deps): Bump workflows to latest trusted versions (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/258">#258</a>)</li>
<li><a
href="7a0461e713"><code>7a0461e</code></a>
build(deps): Bump workflows to latest trusted versions (<a
href="https://redirect.github.com/hashicorp/hc-install/issues/257">#257</a>)</li>
<li>Additional commits viewable in <a
href="https://github.com/hashicorp/hc-install/compare/v0.9.0...v0.9.1">compare
view</a></li>
</ul>
</details>
<br />

<details>
<summary>Most Recent Ignore Conditions Applied to This Pull
Request</summary>

| Dependency Name | Ignore Conditions |
| --- | --- |
| github.com/hashicorp/hc-install | [>= 0.8.a, < 0.9] |
</details>


[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/hashicorp/hc-install&package-manager=go_modules&previous-version=0.9.0&new-version=0.9.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-13 13:26:47 +00:00
dependabot[bot] 244a5b6bc6
Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 (#2080)
Bumps [golang.org/x/oauth2](https://github.com/golang/oauth2) from
0.24.0 to 0.25.0.
<details>
<summary>Commits</summary>
<ul>
<li><a
href="49a531d12a"><code>49a531d</code></a>
all: make method and struct comments match the names</li>
<li>See full diff in <a
href="https://github.com/golang/oauth2/compare/v0.24.0...v0.25.0">compare
view</a></li>
</ul>
</details>
<br />


[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/oauth2&package-manager=go_modules&previous-version=0.24.0&new-version=0.25.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.

[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)

---

<details>
<summary>Dependabot commands and options</summary>
<br />

You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show <dependency name> ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)


</details>

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-01-13 13:26:35 +00:00
Denis Bilenko 1ead1b2e36
Move merge fix-ups after variable resolution (#2125)
## Changes
Move mutator.Merge{JobClusters,JobParameters,JobTasks,PipelineClusters}
after variable resolution. This helps with the case when key contains a
variable.

@pietern mentioned here
https://github.com/databricks/cli/pull/2101#pullrequestreview-2539168762
it should be safe.

## Tests
Existing acceptance that was capturing the bug is updated with corrected
output.
2025-01-13 13:01:31 +00:00
56 changed files with 2903 additions and 134 deletions

View File

@ -20,7 +20,6 @@ targets:
jobs: jobs:
foo: foo:
job_clusters: job_clusters:
# This does not work because merging is done before resolution
- job_cluster_key: "${var.mykey}" - job_cluster_key: "${var.mykey}"
new_cluster: new_cluster:
node_type_id: i3.xlarge node_type_id: i3.xlarge

View File

@ -9,17 +9,12 @@
"edit_mode": "UI_LOCKED", "edit_mode": "UI_LOCKED",
"format": "MULTI_TASK", "format": "MULTI_TASK",
"job_clusters": [ "job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"spark_version": "13.3.x-scala2.12"
}
},
{ {
"job_cluster_key": "key", "job_cluster_key": "key",
"new_cluster": { "new_cluster": {
"node_type_id": "i3.xlarge", "node_type_id": "i3.xlarge",
"num_workers": 1 "num_workers": 1,
"spark_version": "13.3.x-scala2.12"
} }
} }
], ],
@ -51,17 +46,12 @@ Validation OK!
"edit_mode": "UI_LOCKED", "edit_mode": "UI_LOCKED",
"format": "MULTI_TASK", "format": "MULTI_TASK",
"job_clusters": [ "job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"spark_version": "13.3.x-scala2.12"
}
},
{ {
"job_cluster_key": "key", "job_cluster_key": "key",
"new_cluster": { "new_cluster": {
"node_type_id": "i3.2xlarge", "node_type_id": "i3.2xlarge",
"num_workers": 4 "num_workers": 4,
"spark_version": "13.3.x-scala2.12"
} }
} }
], ],

View File

@ -0,0 +1,50 @@
package apps
import (
"context"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/dynvar"
)
// interpolateVariables resolves variable references inside the `config`
// block of every app resource (resources.apps.*.config).
type interpolateVariables struct{}

// Apply walks each app's `config` value and resolves ${...} references
// against the bundle configuration root.
func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// Only values under resources.apps.<name>.config are interpolated.
	pattern := dyn.NewPattern(
		dyn.Key("resources"),
		dyn.Key("apps"),
		dyn.AnyKey(),
		dyn.Key("config"),
	)

	// Map Terraform resource names (e.g. "databricks_job") back to their
	// bundle configuration keys (e.g. "jobs") so Terraform-style references
	// resolve against the bundle configuration tree.
	tfToConfigMap := map[string]string{}
	for k, r := range config.SupportedResources() {
		tfToConfigMap[r.TerraformResourceName] = k
	}
	err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
		return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
			return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
				// Rewrite e.g. databricks_job.my_job.id to resources.jobs.my_job.id.
				key, ok := tfToConfigMap[path[0].Key()]
				if ok {
					path = dyn.NewPath(dyn.Key("resources"), dyn.Key(key)).Append(path[1:]...)
				}
				return dyn.GetByPath(root, path)
			})
		})
	})
	return diag.FromErr(err)
}

// Name implements bundle.Mutator.
func (i *interpolateVariables) Name() string {
	return "apps.InterpolateVariables"
}

// InterpolateVariables returns a mutator that resolves variable references
// inside the config block of app resources.
func InterpolateVariables() bundle.Mutator {
	return &interpolateVariables{}
}

View File

@ -0,0 +1,49 @@
package apps
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/require"
)
// TestAppInterpolateVariables verifies that a Terraform-style reference
// (${databricks_job.my_job.id}) inside an app's config resolves to the job's
// ID, and that apps without a config are left untouched.
func TestAppInterpolateVariables(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"my_app_1": {
						App: &apps.App{
							Name: "my_app_1",
						},
						Config: map[string]any{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "JOB_ID", "value": "${databricks_job.my_job.id}"},
							},
						},
					},
					"my_app_2": {
						App: &apps.App{
							Name: "my_app_2",
						},
					},
				},
				Jobs: map[string]*resources.Job{
					"my_job": {
						ID: "123",
					},
				},
			},
		},
	}
	diags := bundle.Apply(context.Background(), b, InterpolateVariables())
	require.Empty(t, diags)
	// The Terraform-style job reference in my_app_1's env resolved to "123".
	require.Equal(t, []any([]any{map[string]any{"name": "JOB_ID", "value": "123"}}), b.Config.Resources.Apps["my_app_1"].Config["env"])
	// my_app_2 has no config; interpolation must not materialize one.
	require.Nil(t, b.Config.Resources.Apps["my_app_2"].Config)
}

View File

@ -0,0 +1,97 @@
package apps
import (
"bytes"
"context"
"fmt"
"path"
"strings"
"sync"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/deploy"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/filer"
"golang.org/x/sync/errgroup"
"gopkg.in/yaml.v3"
)
// uploadConfig writes the in-bundle `config` of each app resource to the
// app's remote source code path as an app.yml file.
type uploadConfig struct {
	// filerFactory creates the filer used to write to the workspace.
	// Injected so tests can substitute a mock.
	filerFactory deploy.FilerFactory
}

// Apply uploads an app.yml for every app resource that has a `config`
// section. Uploads run concurrently via an errgroup; per-app write failures
// are collected as diagnostics rather than aborting the whole batch.
func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	errGroup, ctx := errgroup.WithContext(ctx)

	// Guards concurrent appends to diags from the upload goroutines.
	mu := sync.Mutex{}
	for key, app := range b.Config.Resources.Apps {
		// If the app has a config, we need to deploy it first.
		// It means we need to write app.yml file with the content of the config field
		// to the remote source code path of the app.
		if app.Config != nil {
			// Path relative to the workspace file path; the filer below is
			// rooted at b.Config.Workspace.FilePath.
			appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)
			buf, err := configToYaml(app)
			if err != nil {
				return diag.FromErr(err)
			}
			// NOTE(review): the filer does not depend on the app, so this is
			// loop-invariant and could be created once lazily.
			f, err := u.filerFactory(b)
			if err != nil {
				return diag.FromErr(err)
			}
			errGroup.Go(func() error {
				err := f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
				if err != nil {
					mu.Lock()
					diags = append(diags, diag.Diagnostic{
						Severity:  diag.Error,
						Summary:   "Failed to save config",
						Detail:    fmt.Sprintf("Failed to write %s file: %s", path.Join(app.SourceCodePath, "app.yml"), err),
						Locations: b.Config.GetLocations("resources.apps." + key),
					})
					mu.Unlock()
				}
				// Write failures are reported via diags, not the errgroup,
				// so one failed upload does not cancel the others.
				return nil
			})
		}
	}
	if err := errGroup.Wait(); err != nil {
		return diags.Extend(diag.FromErr(err))
	}
	return diags
}
// Name implements bundle.Mutator.
func (u *uploadConfig) Name() string {
	const name = "apps:UploadConfig"
	return name
}

// UploadConfig returns a mutator that uploads an app.yml file for every app
// resource with a `config` section, using a workspace-files filer rooted at
// the bundle's workspace file path.
func UploadConfig() bundle.Mutator {
	u := &uploadConfig{
		filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
			return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.FilePath)
		},
	}
	return u
}
// configToYaml serializes the app's `config` map to YAML (2-space indent)
// and returns it as a buffer ready to be uploaded as app.yml.
//
// The encoder is closed explicitly and its error checked: Close flushes
// buffered output, so ignoring it (as a bare defer would) could silently
// produce a truncated document.
func configToYaml(app *resources.App) (*bytes.Buffer, error) {
	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2)

	if err := enc.Encode(app.Config); err != nil {
		// Best effort close; the Encode error is the one worth reporting.
		enc.Close()
		return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
	}

	if err := enc.Close(); err != nil {
		return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
	}

	return buf, nil
}

View File

@ -0,0 +1,75 @@
package apps
import (
"bytes"
"context"
"os"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
mockfiler "github.com/databricks/cli/internal/mocks/libs/filer"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
// TestAppUploadConfig verifies that an app's `config` map is serialized to
// YAML and written to <source_code_path>/app.yml through the (mocked) filer.
func TestAppUploadConfig(t *testing.T) {
	root := t.TempDir()
	err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
	require.NoError(t, err)

	b := &bundle.Bundle{
		BundleRootPath: root,
		SyncRootPath:   root,
		SyncRoot:       vfs.MustNew(root),
		Config: config.Root{
			Workspace: config.Workspace{
				RootPath: "/Workspace/Users/foo@bar.com/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"my_app": {
						App: &apps.App{
							Name: "my_app",
						},
						SourceCodePath: "./my_app",
						Config: map[string]any{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "MY_APP", "value": "my value"},
							},
						},
					},
				},
			},
		},
	}

	// Expect a single write of the YAML-encoded config, relative to the
	// workspace file path, overwriting any existing app.yml.
	mockFiler := mockfiler.NewMockFiler(t)
	mockFiler.EXPECT().Write(mock.Anything, "my_app/app.yml", bytes.NewBufferString(`command:
  - echo
  - hello
env:
  - name: MY_APP
    value: my value
`), filer.OverwriteIfExists).Return(nil)

	u := uploadConfig{
		filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
			return mockFiler, nil
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})

	// TranslatePaths resolves the relative source code path before upload.
	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u))
	require.NoError(t, diags.Error())
}

53
bundle/apps/validate.go Normal file
View File

@ -0,0 +1,53 @@
package apps
import (
"context"
"fmt"
"path"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
)
// validate checks app resources for configuration problems that would lead
// to a broken deployment.
type validate struct{}

// Apply emits an error diagnostic when:
//   - two app resources share the same source code path (their uploaded
//     configurations would overwrite each other), or
//   - an app's source code path already contains an app.yml/app.yaml file,
//     which conflicts with the `config` property.
func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	possibleConfigFiles := []string{"app.yml", "app.yaml"}
	usedSourceCodePaths := make(map[string]string)

	for key, app := range b.Config.Resources.Apps {
		// NOTE(review): map iteration order is random, so which of the two
		// conflicting apps is reported as the duplicate is not deterministic.
		if _, ok := usedSourceCodePaths[app.SourceCodePath]; ok {
			diags = append(diags, diag.Diagnostic{
				Severity:  diag.Error,
				Summary:   "Duplicate app source code path",
				Detail:    fmt.Sprintf("app resource '%s' has the same source code path as app resource '%s', this will lead to the app configuration being overridden by each other", key, usedSourceCodePaths[app.SourceCodePath]),
				Locations: b.Config.GetLocations(fmt.Sprintf("resources.apps.%s.source_code_path", key)),
			})
		}
		usedSourceCodePaths[app.SourceCodePath] = key

		for _, configFile := range possibleConfigFiles {
			// Stat is relative to the sync root, so strip the workspace
			// file path prefix from the app's source code path first.
			appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)
			cf := path.Join(appPath, configFile)
			if _, err := b.SyncRoot.Stat(cf); err == nil {
				diags = append(diags, diag.Diagnostic{
					Severity: diag.Error,
					Summary:  configFile + " detected",
					Detail:   fmt.Sprintf("remove %s and use 'config' property for app resource '%s' instead", cf, app.Name),
				})
			}
		}
	}

	return diags
}
// Name implements bundle.Mutator.
func (v *validate) Name() string {
	const name = "apps.Validate"
	return name
}

// Validate returns a mutator that checks app resources for duplicate source
// code paths and stray app.yml/app.yaml files.
func Validate() bundle.Mutator {
	v := &validate{}
	return v
}

View File

@ -0,0 +1,97 @@
package apps
import (
"context"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/require"
)
// TestAppsValidate checks that a stray app.yml inside an app's source code
// path is reported as an error (the `config` property should be used
// instead), while an app whose directory only contains app.py passes.
func TestAppsValidate(t *testing.T) {
	tmpDir := t.TempDir()
	testutil.Touch(t, tmpDir, "app1", "app.yml")
	testutil.Touch(t, tmpDir, "app2", "app.py")

	b := &bundle.Bundle{
		BundleRootPath: tmpDir,
		SyncRootPath:   tmpDir,
		SyncRoot:       vfs.MustNew(tmpDir),
		Config: config.Root{
			Workspace: config.Workspace{
				FilePath: "/foo/bar/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app1": {
						App: &apps.App{
							Name: "app1",
						},
						SourceCodePath: "./app1",
					},
					"app2": {
						App: &apps.App{
							Name: "app2",
						},
						SourceCodePath: "./app2",
					},
				},
			},
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})

	// TranslatePaths resolves the relative source code paths before
	// validation inspects them.
	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
	require.Len(t, diags, 1)
	require.Equal(t, "app.yml detected", diags[0].Summary)
	require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
}
// TestAppsValidateSameSourcePath checks that two app resources pointing at
// the same source code path produce a single duplicate-path error.
func TestAppsValidateSameSourcePath(t *testing.T) {
	tmpDir := t.TempDir()
	testutil.Touch(t, tmpDir, "app1", "app.py")

	b := &bundle.Bundle{
		BundleRootPath: tmpDir,
		SyncRootPath:   tmpDir,
		SyncRoot:       vfs.MustNew(tmpDir),
		Config: config.Root{
			Workspace: config.Workspace{
				FilePath: "/foo/bar/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app1": {
						App: &apps.App{
							Name: "app1",
						},
						SourceCodePath: "./app1",
					},
					// Deliberately reuses app1's source code path to trigger
					// the duplicate-path diagnostic.
					"app2": {
						App: &apps.App{
							Name: "app2",
						},
						SourceCodePath: "./app1",
					},
				},
			},
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})

	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
	require.Len(t, diags, 1)
	require.Equal(t, "Duplicate app source code path", diags[0].Summary)
	require.Contains(t, diags[0].Detail, "has the same source code path as app resource")
}

View File

@ -0,0 +1,37 @@
package generate
import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
)
// ConvertAppToValue converts an existing app into a dyn.Value suitable for
// generated bundle configuration.
//
// Only the writable fields (name, description, source code path, plus the
// optional config and resources) are carried over; the remaining fields of
// apps.App are read-only. The synthetic line numbers fix the key order in
// the generated output.
func ConvertAppToValue(app *apps.App, sourceCodePath string, appConfig map[string]any) (dyn.Value, error) {
	configValue, err := convert.FromTyped(appConfig, dyn.NilValue)
	if err != nil {
		return dyn.NilValue, err
	}

	resourcesValue, err := convert.FromTyped(app.Resources, dyn.NilValue)
	if err != nil {
		return dyn.NilValue, err
	}

	// The majority of fields of the app struct are read-only.
	// We copy the relevant fields manually.
	fields := map[string]dyn.Value{
		"name":             dyn.NewValue(app.Name, []dyn.Location{{Line: 1}}),
		"description":      dyn.NewValue(app.Description, []dyn.Location{{Line: 2}}),
		"source_code_path": dyn.NewValue(sourceCodePath, []dyn.Location{{Line: 3}}),
	}

	if configValue.Kind() != dyn.KindNil {
		fields["config"] = configValue.WithLocations([]dyn.Location{{Line: 4}})
	}

	if resourcesValue.Kind() != dyn.KindNil {
		fields["resources"] = resourcesValue.WithLocations([]dyn.Location{{Line: 5}})
	}

	return dyn.V(fields), nil
}

View File

@ -221,6 +221,8 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
dashboard.DisplayName = prefix + dashboard.DisplayName dashboard.DisplayName = prefix + dashboard.DisplayName
} }
// Apps: No presets
return diags return diags
} }

View File

@ -56,6 +56,22 @@ func (m *applySourceLinkedDeploymentPreset) Apply(ctx context.Context, b *bundle
b.Config.Presets.SourceLinkedDeployment = &enabled b.Config.Presets.SourceLinkedDeployment = &enabled
} }
if len(b.Config.Resources.Apps) > 0 && config.IsExplicitlyEnabled(b.Config.Presets.SourceLinkedDeployment) {
path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("presets"), dyn.Key("source_linked_deployment"))
diags = diags.Append(
diag.Diagnostic{
Severity: diag.Error,
Summary: "source-linked deployment is not supported for apps",
Paths: []dyn.Path{
path,
},
Locations: b.Config.GetLocations(path[2:].String()),
},
)
return diags
}
if b.Config.Workspace.FilePath != "" && config.IsExplicitlyEnabled(b.Config.Presets.SourceLinkedDeployment) { if b.Config.Workspace.FilePath != "" && config.IsExplicitlyEnabled(b.Config.Presets.SourceLinkedDeployment) {
path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("workspace"), dyn.Key("file_path")) path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("workspace"), dyn.Key("file_path"))

View File

@ -8,6 +8,7 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest" "github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/dbr" "github.com/databricks/cli/libs/dbr"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
@ -31,6 +32,7 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
initialValue *bool initialValue *bool
expectedValue *bool expectedValue *bool
expectedWarning string expectedWarning string
expectedError string
}{ }{
{ {
name: "preset enabled, bundle in Workspace, databricks runtime", name: "preset enabled, bundle in Workspace, databricks runtime",
@ -86,6 +88,18 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
expectedValue: &enabled, expectedValue: &enabled,
expectedWarning: "workspace.file_path setting will be ignored in source-linked deployment mode", expectedWarning: "workspace.file_path setting will be ignored in source-linked deployment mode",
}, },
{
name: "preset enabled, apps is defined by user",
ctx: dbr.MockRuntime(testContext, true),
mutateBundle: func(b *bundle.Bundle) {
b.Config.Resources.Apps = map[string]*resources.App{
"app": {},
}
},
initialValue: &enabled,
expectedValue: &enabled,
expectedError: "source-linked deployment is not supported for apps",
},
} }
for _, tt := range tests { for _, tt := range tests {
@ -107,7 +121,7 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
bundletest.SetLocation(b, "workspace.file_path", []dyn.Location{{File: "databricks.yml"}}) bundletest.SetLocation(b, "workspace.file_path", []dyn.Location{{File: "databricks.yml"}})
diags := bundle.Apply(tt.ctx, b, mutator.ApplySourceLinkedDeploymentPreset()) diags := bundle.Apply(tt.ctx, b, mutator.ApplySourceLinkedDeploymentPreset())
if diags.HasError() { if diags.HasError() && tt.expectedError == "" {
t.Fatalf("unexpected error: %v", diags) t.Fatalf("unexpected error: %v", diags)
} }
@ -116,6 +130,11 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
require.NotEmpty(t, diags[0].Locations) require.NotEmpty(t, diags[0].Locations)
} }
if tt.expectedError != "" {
require.Equal(t, tt.expectedError, diags[0].Summary)
require.NotEmpty(t, diags[0].Locations)
}
require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment) require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment)
}) })
} }

View File

@ -0,0 +1,45 @@
package mutator
import (
"context"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/merge"
)
// mergeApps deduplicates entries in each app's resource list by name.
type mergeApps struct{}

// MergeApps creates the mutator that merges duplicate app resources,
// letting later entries override earlier ones with the same name.
func MergeApps() bundle.Mutator {
	m := &mergeApps{}
	return m
}

// Name identifies this mutator in logs and diagnostics.
func (m *mergeApps) Name() string {
	const name = "MergeApps"
	return name
}
// resourceName extracts the merge key (the resource name) from a dynamic
// value. Missing values (invalid or nil) yield an empty string; any other
// non-string kind is a programming error and panics.
func (m *mergeApps) resourceName(v dyn.Value) string {
	kind := v.Kind()
	if kind == dyn.KindInvalid || kind == dyn.KindNil {
		return ""
	}
	if kind == dyn.KindString {
		return v.MustString()
	}
	panic("app name must be a string")
}
// Apply merges duplicate entries in every app's `resources` list. Entries
// are keyed by their `name` field; when two entries share a name, the later
// one fully overrides the earlier one (merge.ElementsByKeyWithOverride).
func (m *mergeApps) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		// An empty configuration tree has nothing to merge.
		if v.Kind() == dyn.KindNil {
			return v, nil
		}
		// Visit every app under resources.apps and merge its resource list.
		return dyn.Map(v, "resources.apps", dyn.Foreach(func(_ dyn.Path, app dyn.Value) (dyn.Value, error) {
			return dyn.Map(app, "resources", merge.ElementsByKeyWithOverride("name", m.resourceName))
		}))
	})
	return diag.FromErr(err)
}

View File

@ -0,0 +1,73 @@
package mutator_test
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
)
// TestMergeApps verifies that entries in an app's `resources` list that
// share the same name are collapsed into one, with later entries fully
// overriding earlier ones.
func TestMergeApps(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"foo": {
						App: &apps.App{
							Name: "foo",
							Resources: []apps.AppResource{
								{
									Name: "job1",
									Job: &apps.AppResourceJob{
										Id:         "1234",
										Permission: "CAN_MANAGE_RUN",
									},
								},
								{
									Name: "sql1",
									SqlWarehouse: &apps.AppResourceSqlWarehouse{
										Id:         "5678",
										Permission: "CAN_USE",
									},
								},
								{
									Name: "job1",
									Job: &apps.AppResourceJob{
										Id:         "1234",
										Permission: "CAN_MANAGE",
									},
								},
								{
									Name: "sql1",
									Job: &apps.AppResourceJob{
										Id:         "9876",
										Permission: "CAN_MANAGE",
									},
								},
							},
						},
					},
				},
			},
		},
	}
	diags := bundle.Apply(context.Background(), b, mutator.MergeApps())
	assert.NoError(t, diags.Error())
	j := b.Config.Resources.Apps["foo"]
	// Four entries with two distinct names merge down to two.
	assert.Len(t, j.Resources, 2)
	assert.Equal(t, "job1", j.Resources[0].Name)
	assert.Equal(t, "sql1", j.Resources[1].Name)
	// The last "job1" entry wins, overriding the earlier permission.
	assert.Equal(t, "CAN_MANAGE", string(j.Resources[0].Job.Permission))
	// The last "sql1" entry replaced the SQL warehouse with a job.
	assert.Nil(t, j.Resources[1].SqlWarehouse)
	assert.Equal(t, "CAN_MANAGE", string(j.Resources[1].Job.Permission))
}

View File

@ -13,6 +13,7 @@ import (
"github.com/databricks/cli/libs/tags" "github.com/databricks/cli/libs/tags"
"github.com/databricks/cli/libs/vfs" "github.com/databricks/cli/libs/vfs"
sdkconfig "github.com/databricks/databricks-sdk-go/config" sdkconfig "github.com/databricks/databricks-sdk-go/config"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/dashboards" "github.com/databricks/databricks-sdk-go/service/dashboards"
@ -142,6 +143,13 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
}, },
}, },
}, },
Apps: map[string]*resources.App{
"app1": {
App: &apps.App{
Name: "app1",
},
},
},
}, },
}, },
SyncRoot: vfs.MustNew("/Users/lennart.kats@databricks.com"), SyncRoot: vfs.MustNew("/Users/lennart.kats@databricks.com"),
@ -433,6 +441,13 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) {
for _, key := range field.MapKeys() { for _, key := range field.MapKeys() {
resource := field.MapIndex(key) resource := field.MapIndex(key)
nameField := resource.Elem().FieldByName("Name") nameField := resource.Elem().FieldByName("Name")
resourceType := resources.Type().Field(i).Name
// Skip apps, as they are not renamed
if resourceType == "Apps" {
continue
}
if !nameField.IsValid() || nameField.Kind() != reflect.String { if !nameField.IsValid() || nameField.Kind() != reflect.String {
continue continue
} }

View File

@ -119,6 +119,16 @@ func validateRunAs(b *bundle.Bundle) diag.Diagnostics {
)) ))
} }
// Apps do not support run_as in the API.
if len(b.Config.Resources.Apps) > 0 {
diags = diags.Extend(reportRunAsNotSupported(
"apps",
b.Config.GetLocation("resources.apps"),
b.Config.Workspace.CurrentUser.UserName,
identity,
))
}
return diags return diags
} }

View File

@ -32,6 +32,7 @@ func allResourceTypes(t *testing.T) []string {
// the dyn library gives us the correct list of all resources supported. Please // the dyn library gives us the correct list of all resources supported. Please
// also update this check when adding a new resource // also update this check when adding a new resource
require.Equal(t, []string{ require.Equal(t, []string{
"apps",
"clusters", "clusters",
"dashboards", "dashboards",
"experiments", "experiments",
@ -104,47 +105,47 @@ func TestRunAsWorksForAllowedResources(t *testing.T) {
} }
} }
func TestRunAsErrorForUnsupportedResources(t *testing.T) { // Bundle "run_as" has two modes of operation, each with a different set of
// Bundle "run_as" has two modes of operation, each with a different set of // resources that are supported.
// resources that are supported. // Cases:
// Cases: // 1. When the bundle "run_as" identity is same as the current deployment
// 1. When the bundle "run_as" identity is same as the current deployment // identity. In this case all resources are supported.
// identity. In this case all resources are supported. // 2. When the bundle "run_as" identity is different from the current
// 2. When the bundle "run_as" identity is different from the current // deployment identity. In this case only a subset of resources are
// deployment identity. In this case only a subset of resources are // supported. This subset of resources are defined in the allow list below.
// supported. This subset of resources are defined in the allow list below. //
// // To be a part of the allow list, the resource must satisfy one of the following
// To be a part of the allow list, the resource must satisfy one of the following // two conditions:
// two conditions: // 1. The resource supports setting a run_as identity to a different user
// 1. The resource supports setting a run_as identity to a different user // from the owner/creator of the resource. For example, jobs.
// from the owner/creator of the resource. For example, jobs. // 2. Run as semantics do not apply to the resource. We do not plan to add
// 2. Run as semantics do not apply to the resource. We do not plan to add // platform side support for `run_as` for these resources. For example,
// platform side support for `run_as` for these resources. For example, // experiments or registered models.
// experiments or registered models. //
// // Any resource that is not on the allow list cannot be used when the bundle
// Any resource that is not on the allow list cannot be used when the bundle // run_as is different from the current deployment user. "bundle validate" must
// run_as is different from the current deployment user. "bundle validate" must // return an error if such a resource has been defined, and the run_as identity
// return an error if such a resource has been defined, and the run_as identity // is different from the current deployment identity.
// is different from the current deployment identity. //
// // Action Item: If you are adding a new resource to DABs, please check in with
// Action Item: If you are adding a new resource to DABs, please check in with // the relevant owning team whether the resource should be on the allow list or (implicitly) on
// the relevant owning team whether the resource should be on the allow list or (implicitly) on // the deny list. Any resources that could have run_as semantics in the future
// the deny list. Any resources that could have run_as semantics in the future // should be on the deny list.
// should be on the deny list. // For example: Teams for pipelines, model serving endpoints or Lakeview dashboards
// For example: Teams for pipelines, model serving endpoints or Lakeview dashboards // are planning to add platform side support for `run_as` for these resources at
// are planning to add platform side support for `run_as` for these resources at // some point in the future. These resources are (implicitly) on the deny list, since
// some point in the future. These resources are (implicitly) on the deny list, since // they are not on the allow list below.
// they are not on the allow list below. var allowList = []string{
allowList := []string{ "clusters",
"clusters", "jobs",
"jobs", "models",
"models", "registered_models",
"registered_models", "experiments",
"experiments", "schemas",
"schemas", "volumes",
"volumes", }
}
func TestRunAsErrorForUnsupportedResources(t *testing.T) {
base := config.Root{ base := config.Root{
Workspace: config.Workspace{ Workspace: config.Workspace{
CurrentUser: &config.User{ CurrentUser: &config.User{
@ -197,3 +198,54 @@ func TestRunAsErrorForUnsupportedResources(t *testing.T) {
"See https://docs.databricks.com/dev-tools/bundles/run-as.html to learn more about the run_as property.", rt) "See https://docs.databricks.com/dev-tools/bundles/run-as.html to learn more about the run_as property.", rt)
} }
} }
func TestRunAsNoErrorForSupportedResources(t *testing.T) {
base := config.Root{
Workspace: config.Workspace{
CurrentUser: &config.User{
User: &iam.User{
UserName: "alice",
},
},
},
RunAs: &jobs.JobRunAs{
UserName: "bob",
},
}
v, err := convert.FromTyped(base, dyn.NilValue)
require.NoError(t, err)
// Define top level resources key in the bundle configuration.
// This is not part of the typed configuration, so we need to add it manually.
v, err = dyn.Set(v, "resources", dyn.V(map[string]dyn.Value{}))
require.NoError(t, err)
for _, rt := range allResourceTypes(t) {
// Skip unsupported resources
if !slices.Contains(allowList, rt) {
continue
}
// Add an instance of the resource type that is not on the allow list to
// the bundle configuration.
nv, err := dyn.SetByPath(v, dyn.NewPath(dyn.Key("resources"), dyn.Key(rt)), dyn.V(map[string]dyn.Value{
"foo": dyn.V(map[string]dyn.Value{
"name": dyn.V("bar"),
}),
}))
require.NoError(t, err)
// Get back typed configuration from the newly created invalid bundle configuration.
r := &config.Root{}
err = convert.ToTyped(r, nv)
require.NoError(t, err)
// Assert this configuration passes validation.
b := &bundle.Bundle{
Config: *r,
}
diags := bundle.Apply(context.Background(), b, SetRunAs())
require.NoError(t, diags.Error())
}
}

View File

@ -262,6 +262,7 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
t.applyPipelineTranslations, t.applyPipelineTranslations,
t.applyArtifactTranslations, t.applyArtifactTranslations,
t.applyDashboardTranslations, t.applyDashboardTranslations,
t.applyAppsTranslations,
} { } {
v, err = fn(v) v, err = fn(v)
if err != nil { if err != nil {

View File

@ -0,0 +1,28 @@
package mutator
import (
"fmt"
"github.com/databricks/cli/libs/dyn"
)
// applyAppsTranslations rewrites each app's `source_code_path` from a path
// relative to the YAML file that defines it into a remote absolute path.
func (t *translateContext) applyAppsTranslations(v dyn.Value) (dyn.Value, error) {
	// Convert the `source_code_path` field to a remote absolute path.
	// We use this path for app deployment to point to the source code.
	pattern := dyn.NewPattern(
		dyn.Key("resources"),
		dyn.Key("apps"),
		dyn.AnyKey(),
		dyn.Key("source_code_path"),
	)
	return dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
		// p[2] is the app's key under resources.apps; used in error messages.
		key := p[2].Key()
		// Relative paths resolve against the directory of the file in which
		// the value was defined.
		dir, err := v.Location().Directory()
		if err != nil {
			return dyn.InvalidValue, fmt.Errorf("unable to determine directory for app %s: %w", key, err)
		}
		return t.rewriteRelativeTo(p, v, t.translateDirectoryPath, dir, "")
	})
}

View File

@ -0,0 +1,57 @@
package mutator_test
import (
"context"
"path/filepath"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestTranslatePathsApps_FilePathRelativeSubDirectory verifies that an app's
// `source_code_path`, given relative to the YAML file that defines it, is
// rewritten to the corresponding absolute path under workspace.file_path.
func TestTranslatePathsApps_FilePathRelativeSubDirectory(t *testing.T) {
	dir := t.TempDir()
	touchEmptyFile(t, filepath.Join(dir, "src", "app", "app.py"))
	b := &bundle.Bundle{
		SyncRootPath: dir,
		SyncRoot:     vfs.MustNew(dir),
		Config: config.Root{
			Workspace: config.Workspace{
				FilePath: "/bundle/files",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app": {
						App: &apps.App{
							Name: "My App",
						},
						SourceCodePath: "../src/app",
					},
				},
			},
		},
	}
	// The app is declared in resources/app.yml, so "../src/app" resolves
	// relative to that file's directory.
	bundletest.SetLocation(b, "resources.apps", []dyn.Location{{
		File: filepath.Join(dir, "resources/app.yml"),
	}})
	diags := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
	require.NoError(t, diags.Error())
	// Assert that the file path for the app has been converted to its
	// remote workspace path under workspace.file_path.
	assert.Equal(
		t,
		"/bundle/files/src/app",
		b.Config.Resources.Apps["app"].SourceCodePath,
	)
}

View File

@ -23,6 +23,7 @@ type Resources struct {
Volumes map[string]*resources.Volume `json:"volumes,omitempty"` Volumes map[string]*resources.Volume `json:"volumes,omitempty"`
Clusters map[string]*resources.Cluster `json:"clusters,omitempty"` Clusters map[string]*resources.Cluster `json:"clusters,omitempty"`
Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"` Dashboards map[string]*resources.Dashboard `json:"dashboards,omitempty"`
Apps map[string]*resources.App `json:"apps,omitempty"`
} }
type ConfigResource interface { type ConfigResource interface {
@ -87,6 +88,7 @@ func (r *Resources) AllResources() []ResourceGroup {
collectResourceMap(descriptions["clusters"], r.Clusters), collectResourceMap(descriptions["clusters"], r.Clusters),
collectResourceMap(descriptions["dashboards"], r.Dashboards), collectResourceMap(descriptions["dashboards"], r.Dashboards),
collectResourceMap(descriptions["volumes"], r.Volumes), collectResourceMap(descriptions["volumes"], r.Volumes),
collectResourceMap(descriptions["apps"], r.Apps),
} }
} }
@ -97,12 +99,19 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
found = append(found, r.Jobs[k]) found = append(found, r.Jobs[k])
} }
} }
for k := range r.Pipelines { for k := range r.Pipelines {
if k == key { if k == key {
found = append(found, r.Pipelines[k]) found = append(found, r.Pipelines[k])
} }
} }
for k := range r.Apps {
if k == key {
found = append(found, r.Apps[k])
}
}
if len(found) == 0 { if len(found) == 0 {
return nil, fmt.Errorf("no such resource: %s", key) return nil, fmt.Errorf("no such resource: %s", key)
} }
@ -126,76 +135,96 @@ type ResourceDescription struct {
// Singular and plural title when used in summaries / terminal UI. // Singular and plural title when used in summaries / terminal UI.
SingularTitle string SingularTitle string
PluralTitle string PluralTitle string
TerraformResourceName string
} }
// The keys of the map corresponds to the resource key in the bundle configuration. // The keys of the map corresponds to the resource key in the bundle configuration.
func SupportedResources() map[string]ResourceDescription { func SupportedResources() map[string]ResourceDescription {
return map[string]ResourceDescription{ return map[string]ResourceDescription{
"jobs": { "jobs": {
SingularName: "job", SingularName: "job",
PluralName: "jobs", PluralName: "jobs",
SingularTitle: "Job", SingularTitle: "Job",
PluralTitle: "Jobs", PluralTitle: "Jobs",
TerraformResourceName: "databricks_job",
}, },
"pipelines": { "pipelines": {
SingularName: "pipeline", SingularName: "pipeline",
PluralName: "pipelines", PluralName: "pipelines",
SingularTitle: "Pipeline", SingularTitle: "Pipeline",
PluralTitle: "Pipelines", PluralTitle: "Pipelines",
TerraformResourceName: "databricks_pipeline",
}, },
"models": { "models": {
SingularName: "model", SingularName: "model",
PluralName: "models", PluralName: "models",
SingularTitle: "Model", SingularTitle: "Model",
PluralTitle: "Models", PluralTitle: "Models",
TerraformResourceName: "databricks_mlflow_model",
}, },
"experiments": { "experiments": {
SingularName: "experiment", SingularName: "experiment",
PluralName: "experiments", PluralName: "experiments",
SingularTitle: "Experiment", SingularTitle: "Experiment",
PluralTitle: "Experiments", PluralTitle: "Experiments",
TerraformResourceName: "databricks_mlflow_experiment",
}, },
"model_serving_endpoints": { "model_serving_endpoints": {
SingularName: "model_serving_endpoint", SingularName: "model_serving_endpoint",
PluralName: "model_serving_endpoints", PluralName: "model_serving_endpoints",
SingularTitle: "Model Serving Endpoint", SingularTitle: "Model Serving Endpoint",
PluralTitle: "Model Serving Endpoints", PluralTitle: "Model Serving Endpoints",
TerraformResourceName: "databricks_model_serving_endpoint",
}, },
"registered_models": { "registered_models": {
SingularName: "registered_model", SingularName: "registered_model",
PluralName: "registered_models", PluralName: "registered_models",
SingularTitle: "Registered Model", SingularTitle: "Registered Model",
PluralTitle: "Registered Models", PluralTitle: "Registered Models",
TerraformResourceName: "databricks_registered_model",
}, },
"quality_monitors": { "quality_monitors": {
SingularName: "quality_monitor", SingularName: "quality_monitor",
PluralName: "quality_monitors", PluralName: "quality_monitors",
SingularTitle: "Quality Monitor", SingularTitle: "Quality Monitor",
PluralTitle: "Quality Monitors", PluralTitle: "Quality Monitors",
TerraformResourceName: "databricks_quality_monitor",
}, },
"schemas": { "schemas": {
SingularName: "schema", SingularName: "schema",
PluralName: "schemas", PluralName: "schemas",
SingularTitle: "Schema", SingularTitle: "Schema",
PluralTitle: "Schemas", PluralTitle: "Schemas",
TerraformResourceName: "databricks_schema",
}, },
"clusters": { "clusters": {
SingularName: "cluster", SingularName: "cluster",
PluralName: "clusters", PluralName: "clusters",
SingularTitle: "Cluster", SingularTitle: "Cluster",
PluralTitle: "Clusters", PluralTitle: "Clusters",
TerraformResourceName: "databricks_cluster",
}, },
"dashboards": { "dashboards": {
SingularName: "dashboard", SingularName: "dashboard",
PluralName: "dashboards", PluralName: "dashboards",
SingularTitle: "Dashboard", SingularTitle: "Dashboard",
PluralTitle: "Dashboards", PluralTitle: "Dashboards",
TerraformResourceName: "databricks_dashboard",
}, },
"volumes": { "volumes": {
SingularName: "volume", SingularName: "volume",
PluralName: "volumes", PluralName: "volumes",
SingularTitle: "Volume", SingularTitle: "Volume",
PluralTitle: "Volumes", PluralTitle: "Volumes",
TerraformResourceName: "databricks_volume",
},
"apps": {
SingularName: "app",
PluralName: "apps",
SingularTitle: "App",
PluralTitle: "Apps",
TerraformResourceName: "databricks_app",
}, },
} }
} }

View File

@ -0,0 +1,70 @@
package resources
import (
"context"
"net/url"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/marshal"
"github.com/databricks/databricks-sdk-go/service/apps"
)
// App is the bundle representation of a Databricks App resource.
type App struct {
	// SourceCodePath is a required field used by DABs to point to Databricks app source code
	// on local disk and to the corresponding workspace path during app deployment.
	SourceCodePath string `json:"source_code_path"`
	// Config is an optional field which allows configuring the app following Databricks app configuration format like in app.yml.
	// When this field is set, DABs read the configuration set in this field and write
	// it to app.yml in the root of the source code folder in Databricks workspace.
	// If there's an app.yml defined locally, DABs will raise an error.
	Config map[string]any `json:"config,omitempty"`
	// Permissions to apply to the app on deployment.
	Permissions []Permission `json:"permissions,omitempty"`
	// ModifiedStatus tracks whether the app is created/updated/deleted relative
	// to the deployment state; internal to the bundle machinery.
	ModifiedStatus ModifiedStatus `json:"modified_status,omitempty" bundle:"internal"`
	// URL of the app in the workspace; populated by InitializeURL.
	URL string `json:"url,omitempty" bundle:"internal"`
	// Embedded SDK type carrying the API-level app definition
	// (name, description, resources, ...).
	*apps.App
}
// UnmarshalJSON implements json.Unmarshaler using the SDK's marshal helpers,
// which handle the embedded apps.App fields.
func (a *App) UnmarshalJSON(b []byte) error {
	return marshal.Unmarshal(b, a)
}

// MarshalJSON implements json.Marshaler using the SDK's marshal helpers.
func (a App) MarshalJSON() ([]byte, error) {
	return marshal.Marshal(a)
}
// Exists reports whether an app with the given name exists in the workspace.
// NOTE(review): any error from GetByName — including transient API failures,
// not just "not found" — is logged as "does not exist" and returned to the
// caller; confirm this matches the other resource implementations.
func (a *App) Exists(ctx context.Context, w *databricks.WorkspaceClient, name string) (bool, error) {
	_, err := w.Apps.GetByName(ctx, name)
	if err != nil {
		log.Debugf(ctx, "app %s does not exist", name)
		return false, err
	}
	return true, nil
}
// TerraformResourceName returns the Terraform resource type backing apps.
func (a *App) TerraformResourceName() string {
	return "databricks_app"
}
// InitializeURL derives the app's workspace URL from the workspace base URL.
func (a *App) InitializeURL(baseURL url.URL) {
	// An empty or "created" modified status means the app only exists in the
	// bundle configuration, not in the workspace, so there is nothing to link to.
	if a.ModifiedStatus == "" || a.ModifiedStatus == ModifiedStatusCreated {
		return
	}
	baseURL.Path = "apps/" + a.Name
	a.URL = baseURL.String()
}
// GetName returns the app name from the embedded SDK struct.
func (a *App) GetName() string {
	return a.Name
}

// GetURL returns the workspace URL computed by InitializeURL.
func (a *App) GetURL() string {
	return a.URL
}

// IsNil reports whether the embedded SDK app definition is absent.
func (a *App) IsNil() bool {
	return a.App == nil
}

View File

@ -9,6 +9,7 @@ import (
"github.com/databricks/cli/bundle/deploy/terraform/tfdyn" "github.com/databricks/cli/bundle/deploy/terraform/tfdyn"
"github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/apps"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
) )
@ -196,6 +197,20 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
} }
cur.ID = instance.Attributes.ID cur.ID = instance.Attributes.ID
config.Resources.Dashboards[resource.Name] = cur config.Resources.Dashboards[resource.Name] = cur
case "databricks_app":
if config.Resources.Apps == nil {
config.Resources.Apps = make(map[string]*resources.App)
}
cur := config.Resources.Apps[resource.Name]
if cur == nil {
cur = &resources.App{ModifiedStatus: resources.ModifiedStatusDeleted, App: &apps.App{}}
} else {
// If the app exists in terraform and bundle, we always set modified status to updated
// because we don't really know if the app source code was updated or not.
cur.ModifiedStatus = resources.ModifiedStatusUpdated
}
cur.Name = instance.Attributes.Name
config.Resources.Apps[resource.Name] = cur
case "databricks_permissions": case "databricks_permissions":
case "databricks_grants": case "databricks_grants":
// Ignore; no need to pull these back into the configuration. // Ignore; no need to pull these back into the configuration.
@ -260,6 +275,11 @@ func TerraformToBundle(state *resourcesState, config *config.Root) error {
src.ModifiedStatus = resources.ModifiedStatusCreated src.ModifiedStatus = resources.ModifiedStatusCreated
} }
} }
for _, src := range config.Resources.Apps {
if src.ModifiedStatus == "" {
src.ModifiedStatus = resources.ModifiedStatusCreated
}
}
return nil return nil
} }

View File

@ -10,6 +10,7 @@ import (
"github.com/databricks/cli/bundle/internal/tf/schema" "github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/catalog"
"github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/dashboards" "github.com/databricks/databricks-sdk-go/service/dashboards"
@ -694,6 +695,14 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
{Attributes: stateInstanceAttributes{ID: "1"}}, {Attributes: stateInstanceAttributes{ID: "1"}},
}, },
}, },
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "app1"}},
},
},
}, },
} }
err := TerraformToBundle(&tfState, &config) err := TerraformToBundle(&tfState, &config)
@ -732,6 +741,9 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID) assert.Equal(t, "1", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus) assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
assert.Equal(t, "app1", config.Resources.Apps["test_app"].Name)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app"].ModifiedStatus)
AssertFullResourceCoverage(t, &config) AssertFullResourceCoverage(t, &config)
} }
@ -815,6 +827,13 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
}, },
}, },
}, },
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Description: "test_app",
},
},
},
}, },
} }
tfState := resourcesState{ tfState := resourcesState{
@ -856,6 +875,9 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID) assert.Equal(t, "", config.Resources.Dashboards["test_dashboard"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard"].ModifiedStatus)
assert.Equal(t, "", config.Resources.Apps["test_app"].Name)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app"].ModifiedStatus)
AssertFullResourceCoverage(t, &config) AssertFullResourceCoverage(t, &config)
} }
@ -994,6 +1016,18 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
}, },
}, },
}, },
Apps: map[string]*resources.App{
"test_app": {
App: &apps.App{
Name: "test_app",
},
},
"test_app_new": {
App: &apps.App{
Name: "test_app_new",
},
},
},
}, },
} }
tfState := resourcesState{ tfState := resourcesState{
@ -1174,6 +1208,22 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
{Attributes: stateInstanceAttributes{ID: "2"}}, {Attributes: stateInstanceAttributes{ID: "2"}},
}, },
}, },
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "test_app"}},
},
},
{
Type: "databricks_app",
Mode: "managed",
Name: "test_app_old",
Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{Name: "test_app_old"}},
},
},
}, },
} }
err := TerraformToBundle(&tfState, &config) err := TerraformToBundle(&tfState, &config)
@ -1256,6 +1306,13 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID) assert.Equal(t, "", config.Resources.Dashboards["test_dashboard_new"].ID)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus) assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Dashboards["test_dashboard_new"].ModifiedStatus)
assert.Equal(t, "test_app", config.Resources.Apps["test_app"].Name)
assert.Equal(t, resources.ModifiedStatusUpdated, config.Resources.Apps["test_app"].ModifiedStatus)
assert.Equal(t, "test_app_old", config.Resources.Apps["test_app_old"].Name)
assert.Equal(t, resources.ModifiedStatusDeleted, config.Resources.Apps["test_app_old"].ModifiedStatus)
assert.Equal(t, "test_app_new", config.Resources.Apps["test_app_new"].Name)
assert.Equal(t, resources.ModifiedStatusCreated, config.Resources.Apps["test_app_new"].ModifiedStatus)
AssertFullResourceCoverage(t, &config) AssertFullResourceCoverage(t, &config)
} }

View File

@ -63,6 +63,8 @@ func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.D
path = dyn.NewPath(dyn.Key("databricks_cluster")).Append(path[2:]...) path = dyn.NewPath(dyn.Key("databricks_cluster")).Append(path[2:]...)
case dyn.Key("dashboards"): case dyn.Key("dashboards"):
path = dyn.NewPath(dyn.Key("databricks_dashboard")).Append(path[2:]...) path = dyn.NewPath(dyn.Key("databricks_dashboard")).Append(path[2:]...)
case dyn.Key("apps"):
path = dyn.NewPath(dyn.Key("databricks_app")).Append(path[2:]...)
default: default:
// Trigger "key not found" for unknown resource types. // Trigger "key not found" for unknown resource types.
return dyn.GetByPath(root, path) return dyn.GetByPath(root, path)

View File

@ -34,6 +34,7 @@ func TestInterpolate(t *testing.T) {
"other_volume": "${resources.volumes.other_volume.id}", "other_volume": "${resources.volumes.other_volume.id}",
"other_cluster": "${resources.clusters.other_cluster.id}", "other_cluster": "${resources.clusters.other_cluster.id}",
"other_dashboard": "${resources.dashboards.other_dashboard.id}", "other_dashboard": "${resources.dashboards.other_dashboard.id}",
"other_app": "${resources.apps.other_app.id}",
}, },
Tasks: []jobs.Task{ Tasks: []jobs.Task{
{ {
@ -73,6 +74,7 @@ func TestInterpolate(t *testing.T) {
assert.Equal(t, "${databricks_volume.other_volume.id}", j.Tags["other_volume"]) assert.Equal(t, "${databricks_volume.other_volume.id}", j.Tags["other_volume"])
assert.Equal(t, "${databricks_cluster.other_cluster.id}", j.Tags["other_cluster"]) assert.Equal(t, "${databricks_cluster.other_cluster.id}", j.Tags["other_cluster"])
assert.Equal(t, "${databricks_dashboard.other_dashboard.id}", j.Tags["other_dashboard"]) assert.Equal(t, "${databricks_dashboard.other_dashboard.id}", j.Tags["other_dashboard"])
assert.Equal(t, "${databricks_app.other_app.id}", j.Tags["other_app"])
m := b.Config.Resources.Models["my_model"] m := b.Config.Resources.Models["my_model"]
assert.Equal(t, "my_model", m.Model.Name) assert.Equal(t, "my_model", m.Model.Name)

View File

@ -0,0 +1,55 @@
package tfdyn
import (
"context"
"fmt"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/cli/libs/log"
"github.com/databricks/databricks-sdk-go/service/apps"
)
// convertAppResource normalizes a bundle app value into the shape expected
// by the Terraform schema for databricks_app.
func convertAppResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
	// The Apps API returns an empty string for an unset description, so
	// materialize the same empty string here to avoid a permanent Terraform
	// drift between the configuration and the remote state.
	if _, getErr := dyn.Get(vin, "description"); getErr != nil {
		var setErr error
		vin, setErr = dyn.Set(vin, "description", dyn.V(""))
		if setErr != nil {
			return vin, setErr
		}
	}

	// Normalize the value against the SDK app type; normalization issues
	// are logged at debug level rather than treated as errors.
	vout, diags := convert.Normalize(apps.App{}, vin)
	for _, d := range diags {
		log.Debugf(ctx, "app normalization diagnostic: %s", d.Summary)
	}

	return vout, nil
}
// appConverter converts a bundle app resource into its Terraform representation.
type appConverter struct{}

// Convert normalizes the app value, stores it under the given key in the
// Terraform resources, and emits a companion permissions resource when the
// app declares any permissions.
func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error {
	vout, err := convertAppResource(ctx, vin)
	if err != nil {
		return err
	}
	// Add the converted resource to the output.
	out.App[key] = vout.AsAny()
	// Configure permissions for this resource.
	if permissions := convertPermissionsResource(ctx, vin); permissions != nil {
		// Apps are referenced by name (not ID) in the permissions resource.
		permissions.AppName = fmt.Sprintf("${databricks_app.%s.name}", key)
		out.Permissions["app_"+key] = permissions
	}
	return nil
}

// Register the converter for the "apps" resource key.
func init() {
	registerConverter("apps", appConverter{})
}

View File

@ -0,0 +1,156 @@
package tfdyn
import (
"context"
"testing"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/tf/schema"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/convert"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConvertApp verifies the full conversion of an app resource — including
// nested job and SQL warehouse resources and permissions — into its Terraform
// representation.
func TestConvertApp(t *testing.T) {
	src := resources.App{
		SourceCodePath: "./app",
		Config: map[string]any{
			"command": []string{"python", "app.py"},
		},
		App: &apps.App{
			Name:        "app_id",
			Description: "app description",
			Resources: []apps.AppResource{
				{
					Name: "job1",
					Job: &apps.AppResourceJob{
						Id:         "1234",
						Permission: "CAN_MANAGE_RUN",
					},
				},
				{
					Name: "sql1",
					SqlWarehouse: &apps.AppResourceSqlWarehouse{
						Id:         "5678",
						Permission: "CAN_USE",
					},
				},
			},
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_RUN",
				UserName: "jack@gmail.com",
			},
			{
				Level:                "CAN_MANAGE",
				ServicePrincipalName: "sp",
			},
		},
	}
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)
	ctx := context.Background()
	out := schema.NewResources()
	err = appConverter{}.Convert(ctx, "my_app", vin, out)
	require.NoError(t, err)
	app := out.App["my_app"]
	// Note: source_code_path and config are bundle-only fields and do not
	// appear in the Terraform output.
	assert.Equal(t, map[string]any{
		"description": "app description",
		"name":        "app_id",
		"resources": []any{
			map[string]any{
				"name": "job1",
				"job": map[string]any{
					"id":         "1234",
					"permission": "CAN_MANAGE_RUN",
				},
			},
			map[string]any{
				"name": "sql1",
				"sql_warehouse": map[string]any{
					"id":         "5678",
					"permission": "CAN_USE",
				},
			},
		},
	}, app)

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		AppName: "${databricks_app.my_app.name}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_RUN",
				UserName:        "jack@gmail.com",
			},
			{
				PermissionLevel:      "CAN_MANAGE",
				ServicePrincipalName: "sp",
			},
		},
	}, out.Permissions["app_my_app"])
}
// TestConvertAppWithNoDescription verifies that an app configured without a
// description is still converted with an explicit (empty) description field.
func TestConvertAppWithNoDescription(t *testing.T) {
	input := resources.App{
		SourceCodePath: "./app",
		Config: map[string]any{
			"command": []string{"python", "app.py"},
		},
		App: &apps.App{
			Name: "app_id",
			Resources: []apps.AppResource{
				{
					Name: "job1",
					Job: &apps.AppResourceJob{
						Id:         "1234",
						Permission: "CAN_MANAGE_RUN",
					},
				},
				{
					Name: "sql1",
					SqlWarehouse: &apps.AppResourceSqlWarehouse{
						Id:         "5678",
						Permission: "CAN_USE",
					},
				},
			},
		},
	}

	value, err := convert.FromTyped(input, dyn.NilValue)
	require.NoError(t, err)

	out := schema.NewResources()
	require.NoError(t, appConverter{}.Convert(context.Background(), "my_app", value, out))

	// The Apps API always returns a description field, so the converter emits
	// an empty one to avoid permanent Terraform drift.
	assert.Equal(t, map[string]any{
		"name":        "app_id",
		"description": "",
		"resources": []any{
			map[string]any{
				"name": "job1",
				"job": map[string]any{
					"id":         "1234",
					"permission": "CAN_MANAGE_RUN",
				},
			},
			map[string]any{
				"name": "sql1",
				"sql_warehouse": map[string]any{
					"id":         "5678",
					"permission": "CAN_USE",
				},
			},
		},
	}, out.App["my_app"])
}

View File

@ -33,7 +33,12 @@ type stateResourceInstance struct {
} }
type stateInstanceAttributes struct { type stateInstanceAttributes struct {
ID string `json:"id"` ID string `json:"id"`
// Some resources such as Apps do not have an ID, so we use the name instead.
// We need this for cases when such resource is removed from bundle config but
// exists in the workspace still so we can correctly display its summary.
Name string `json:"name,omitempty"`
ETag string `json:"etag,omitempty"` ETag string `json:"etag,omitempty"`
} }

View File

@ -97,7 +97,7 @@ func TestParseResourcesStateWithExistingStateFile(t *testing.T) {
Type: "databricks_pipeline", Type: "databricks_pipeline",
Name: "test_pipeline", Name: "test_pipeline",
Instances: []stateResourceInstance{ Instances: []stateResourceInstance{
{Attributes: stateInstanceAttributes{ID: "123"}}, {Attributes: stateInstanceAttributes{ID: "123", Name: "test_pipeline"}},
}, },
}, },
}, },

View File

@ -147,6 +147,9 @@ github.com/databricks/cli/bundle/config.Python:
If enabled, Python code will execute within this environment. If disabled, If enabled, Python code will execute within this environment. If disabled,
it defaults to using the Python interpreter available in the current shell. it defaults to using the Python interpreter available in the current shell.
github.com/databricks/cli/bundle/config.Resources: github.com/databricks/cli/bundle/config.Resources:
"apps":
"description": |-
PLACEHOLDER
"clusters": "clusters":
"description": |- "description": |-
The cluster definitions for the bundle. The cluster definitions for the bundle.
@ -371,6 +374,64 @@ github.com/databricks/cli/bundle/config.Workspace:
"state_path": "state_path":
"description": |- "description": |-
The workspace state path The workspace state path
github.com/databricks/cli/bundle/config/resources.App:
"active_deployment":
"description": |-
PLACEHOLDER
"app_status":
"description": |-
PLACEHOLDER
"compute_status":
"description": |-
PLACEHOLDER
"config":
"description": |-
PLACEHOLDER
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"default_source_code_path":
"description": |-
PLACEHOLDER
"description":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"pending_deployment":
"description": |-
PLACEHOLDER
"permissions":
"description": |-
PLACEHOLDER
"resources":
"description": |-
PLACEHOLDER
"service_principal_client_id":
"description": |-
PLACEHOLDER
"service_principal_id":
"description": |-
PLACEHOLDER
"service_principal_name":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
"updater":
"description": |-
PLACEHOLDER
"url":
"description": |-
PLACEHOLDER
github.com/databricks/cli/bundle/config/resources.Grant: github.com/databricks/cli/bundle/config/resources.Grant:
"principal": "principal":
"description": |- "description": |-
@ -459,3 +520,103 @@ github.com/databricks/cli/bundle/config/variable.Variable:
"type": "type":
"description": |- "description": |-
The type of the variable. The type of the variable.
github.com/databricks/databricks-sdk-go/service/apps.AppDeployment:
"create_time":
"description": |-
PLACEHOLDER
"creator":
"description": |-
PLACEHOLDER
"deployment_artifacts":
"description": |-
PLACEHOLDER
"deployment_id":
"description": |-
PLACEHOLDER
"mode":
"description": |-
PLACEHOLDER
"source_code_path":
"description": |-
PLACEHOLDER
"status":
"description": |-
PLACEHOLDER
"update_time":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts:
"source_code_path":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResource:
"description":
"description": |-
PLACEHOLDER
"job":
"description": |-
PLACEHOLDER
"name":
"description": |-
PLACEHOLDER
"secret":
"description": |-
PLACEHOLDER
"serving_endpoint":
"description": |-
PLACEHOLDER
"sql_warehouse":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret:
"key":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
"scope":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint:
"name":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse:
"id":
"description": |-
PLACEHOLDER
"permission":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus:
"message":
"description": |-
PLACEHOLDER
"state":
"description": |-
PLACEHOLDER

View File

@ -51,6 +51,10 @@ var (
CAN_MANAGE: "CAN_MANAGE", CAN_MANAGE: "CAN_MANAGE",
CAN_VIEW: "CAN_READ", CAN_VIEW: "CAN_READ",
}, },
"apps": {
CAN_MANAGE: "CAN_MANAGE",
CAN_VIEW: "CAN_USE",
},
} }
) )

View File

@ -58,6 +58,10 @@ func TestApplyBundlePermissions(t *testing.T) {
"dashboard_1": {}, "dashboard_1": {},
"dashboard_2": {}, "dashboard_2": {},
}, },
Apps: map[string]*resources.App{
"app_1": {},
"app_2": {},
},
}, },
}, },
} }
@ -114,6 +118,10 @@ func TestApplyBundlePermissions(t *testing.T) {
require.Len(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, 2) require.Len(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, 2)
require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"}) require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"})
require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_READ", GroupName: "TestGroup"}) require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_READ", GroupName: "TestGroup"})
require.Len(t, b.Config.Resources.Apps["app_1"].Permissions, 2)
require.Contains(t, b.Config.Resources.Apps["app_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"})
require.Contains(t, b.Config.Resources.Apps["app_1"].Permissions, resources.Permission{Level: "CAN_USE", GroupName: "TestGroup"})
} }
func TestWarningOnOverlapPermission(t *testing.T) { func TestWarningOnOverlapPermission(t *testing.T) {

View File

@ -5,6 +5,7 @@ import (
"errors" "errors"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/apps"
"github.com/databricks/cli/bundle/artifacts" "github.com/databricks/cli/bundle/artifacts"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
@ -135,6 +136,8 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
bundle.Seq( bundle.Seq(
terraform.StatePush(), terraform.StatePush(),
terraform.Load(), terraform.Load(),
apps.InterpolateVariables(),
apps.UploadConfig(),
metadata.Compute(), metadata.Compute(),
metadata.Upload(), metadata.Upload(),
bundle.LogString("Deployment complete!"), bundle.LogString("Deployment complete!"),

View File

@ -2,6 +2,7 @@ package phases
import ( import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/apps"
"github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/mutator"
pythonmutator "github.com/databricks/cli/bundle/config/mutator/python" pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
@ -33,10 +34,6 @@ func Initialize() bundle.Mutator {
// If it is an ancestor, this updates all paths to be relative to the sync root path. // If it is an ancestor, this updates all paths to be relative to the sync root path.
mutator.SyncInferRoot(), mutator.SyncInferRoot(),
mutator.MergeJobClusters(),
mutator.MergeJobParameters(),
mutator.MergeJobTasks(),
mutator.MergePipelineClusters(),
mutator.InitializeWorkspaceClient(), mutator.InitializeWorkspaceClient(),
mutator.PopulateCurrentUser(), mutator.PopulateCurrentUser(),
mutator.LoadGitDetails(), mutator.LoadGitDetails(),
@ -70,6 +67,13 @@ func Initialize() bundle.Mutator {
"workspace", "workspace",
"variables", "variables",
), ),
mutator.MergeJobClusters(),
mutator.MergeJobParameters(),
mutator.MergeJobTasks(),
mutator.MergePipelineClusters(),
mutator.MergeApps(),
// Provide permission config errors & warnings after initializing all variables // Provide permission config errors & warnings after initializing all variables
permissions.PermissionDiagnostics(), permissions.PermissionDiagnostics(),
mutator.SetRunAs(), mutator.SetRunAs(),
@ -87,6 +91,8 @@ func Initialize() bundle.Mutator {
mutator.TranslatePaths(), mutator.TranslatePaths(),
trampoline.WrapperWarning(), trampoline.WrapperWarning(),
apps.Validate(),
permissions.ValidateSharedRootPermissions(), permissions.ValidateSharedRootPermissions(),
permissions.ApplyBundlePermissions(), permissions.ApplyBundlePermissions(),
permissions.FilterCurrentUser(), permissions.FilterCurrentUser(),

212
bundle/run/app.go Normal file
View File

@ -0,0 +1,212 @@
package run
import (
"context"
"errors"
"fmt"
"time"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/run/output"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
)
// logProgress prints a single checkmark-prefixed progress line to the
// command output; empty messages are silently dropped.
func logProgress(ctx context.Context, msg string) {
	if msg != "" {
		cmdio.LogString(ctx, "✓ "+msg)
	}
}
// appRunner implements the bundle runner interface for Databricks App
// resources: "run" deploys (and if necessary starts) the app, "cancel"
// stops it.
type appRunner struct {
	key

	// bundle is the bundle the app resource belongs to.
	bundle *bundle.Bundle
	// app is the app resource to run; may be nil if the lookup failed.
	app *resources.App
}
// Name returns the app's name, or the empty string when no app is attached
// to this runner.
func (a *appRunner) Name() string {
	if a.app != nil {
		return a.app.Name
	}
	return ""
}
// isAppStopped reports whether the app's compute needs to be started: either
// no compute status was reported at all, or the compute is stopped or in an
// error state.
func isAppStopped(app *apps.App) bool {
	if app.ComputeStatus == nil {
		return true
	}
	switch app.ComputeStatus.State {
	case apps.ComputeStateStopped, apps.ComputeStateError:
		return true
	default:
		return false
	}
}
// Run deploys the app, starting its compute first when it is not running, and
// prints the URL at which the app can be accessed. It returns no run output.
func (a *appRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) {
	if a.app == nil {
		return nil, errors.New("app is not defined")
	}

	logProgress(ctx, "Getting the status of the app "+a.app.Name)
	w := a.bundle.WorkspaceClient()

	// Fetch the current state of the app from the workspace.
	existing, err := w.Apps.Get(ctx, apps.GetAppRequest{Name: a.app.Name})
	if err != nil {
		return nil, err
	}

	if existing.AppStatus != nil {
		logProgress(ctx, fmt.Sprintf("App is in %s state", existing.AppStatus.State))
	}
	if existing.ComputeStatus != nil {
		logProgress(ctx, fmt.Sprintf("App compute is in %s state", existing.ComputeStatus.State))
	}

	// The app may not be running either because it is brand new (never
	// deployed) or because its compute was stopped; in both cases the compute
	// must be started before deploying.
	if isAppStopped(existing) {
		if err := a.start(ctx); err != nil {
			return nil, err
		}
	}

	if err := a.deploy(ctx); err != nil {
		return nil, err
	}

	cmdio.LogString(ctx, "You can access the app at "+existing.Url)
	return nil, nil
}
// start starts the app's compute and waits for it to become active. Once the
// compute is running, the API may report an active and/or pending deployment;
// any deployment still in progress must complete before a new one can be
// created (otherwise the new deployment fails), so we wait for both.
func (a *appRunner) start(ctx context.Context) error {
	app := a.app
	b := a.bundle
	w := b.WorkspaceClient()

	logProgress(ctx, "Starting the app "+app.Name)
	wait, err := w.Apps.Start(ctx, apps.StartAppRequest{Name: app.Name})
	if err != nil {
		return err
	}

	startedApp, err := wait.OnProgress(func(p *apps.App) {
		if p.AppStatus == nil {
			return
		}
		logProgress(ctx, "App is starting...")
	}).Get()
	if err != nil {
		return err
	}

	// After the app is started (meaning the compute is running), the API will
	// return the app object with the active and pending deployment fields (if
	// any). Wait out the active deployment first, then the pending one.
	err = a.waitForDeploymentToComplete(ctx, startedApp.ActiveDeployment,
		"Waiting for the active deployment to complete...",
		"Active deployment is completed!")
	if err != nil {
		return err
	}
	err = a.waitForDeploymentToComplete(ctx, startedApp.PendingDeployment,
		"Waiting for the pending deployment to complete...",
		"Pending deployment is completed!")
	if err != nil {
		return err
	}

	logProgress(ctx, "App is started!")
	return nil
}

// waitForDeploymentToComplete blocks until the given deployment succeeds, but
// only if it is currently in progress. A nil deployment, a deployment without
// a status, or one that is not in progress is a no-op.
func (a *appRunner) waitForDeploymentToComplete(ctx context.Context, d *apps.AppDeployment, waitMsg, doneMsg string) error {
	// Guard against a missing status: the previous code dereferenced d.Status
	// unconditionally, which panics when the API omits the status field.
	if d == nil || d.Status == nil || d.Status.State != apps.AppDeploymentStateInProgress {
		return nil
	}
	logProgress(ctx, waitMsg)
	w := a.bundle.WorkspaceClient()
	if _, err := w.Apps.WaitGetDeploymentAppSucceeded(ctx, a.app.Name, d.DeploymentId, 20*time.Minute, nil); err != nil {
		return err
	}
	logProgress(ctx, doneMsg)
	return nil
}
// deploy creates a new snapshot deployment of the app's source code and waits
// for it to finish, logging deployment status messages as they arrive.
func (a *appRunner) deploy(ctx context.Context) error {
	app := a.app
	b := a.bundle
	w := b.WorkspaceClient()

	wait, err := w.Apps.Deploy(ctx, apps.CreateAppDeploymentRequest{
		AppName: app.Name,
		AppDeployment: &apps.AppDeployment{
			Mode:           apps.AppDeploymentModeSnapshot,
			SourceCodePath: app.SourceCodePath,
		},
	})
	// NOTE(review): Deploy fails if another deployment is already in progress;
	// Run relies on start() having waited those out beforehand. No retry is
	// attempted here — the error is surfaced to the caller.
	if err != nil {
		return err
	}

	_, err = wait.OnProgress(func(ad *apps.AppDeployment) {
		if ad.Status == nil {
			return
		}
		// Relay the deployment's own status message as progress output.
		logProgress(ctx, ad.Status.Message)
	}).Get()
	if err != nil {
		return err
	}

	return nil
}
// Cancel stops the app: stopping the compute is how a running app is
// "cancelled". It blocks until the stop operation completes.
func (a *appRunner) Cancel(ctx context.Context) error {
	app := a.app
	b := a.bundle

	if app == nil {
		return errors.New("app is not defined")
	}

	w := b.WorkspaceClient()

	logProgress(ctx, "Stopping app "+app.Name)
	wait, err := w.Apps.Stop(ctx, apps.StopAppRequest{Name: app.Name})
	if err != nil {
		return err
	}

	_, err = wait.OnProgress(func(p *apps.App) {
		if p.AppStatus == nil {
			return
		}
		logProgress(ctx, p.AppStatus.Message)
	}).Get()
	// Only report success after the wait completed cleanly; previously the
	// "App is stopped!" message was printed even when the stop failed.
	if err != nil {
		return err
	}
	logProgress(ctx, "App is stopped!")
	return nil
}
// Restart re-runs the app: Run performs a fresh deployment (starting the
// compute first if needed), which is how an app restart is expressed.
func (a *appRunner) Restart(ctx context.Context, opts *Options) (output.RunOutput, error) {
	// We should restart the app by just running it again meaning a new app deployment will be done.
	return a.Run(ctx, opts)
}
// ParseArgs validates the positional arguments; app runs accept none.
func (a *appRunner) ParseArgs(args []string, opts *Options) error {
	if len(args) > 0 {
		return fmt.Errorf("received %d unexpected positional arguments", len(args))
	}
	return nil
}
// CompleteArgs returns no shell completions: app runs take no positional
// arguments, so file completion is disabled as well.
func (a *appRunner) CompleteArgs(args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
	return nil, cobra.ShellCompDirectiveNoFileComp
}

216
bundle/run/app_test.go Normal file
View File

@ -0,0 +1,216 @@
package run
import (
"bytes"
"context"
"os"
"path/filepath"
"testing"
"time"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/databricks/cli/bundle/config/resources"
"github.com/databricks/cli/bundle/internal/bundletest"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/flags"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go/experimental/mocks"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
)
// testAppRunner bundles the pieces needed to exercise appRunner in tests:
// the mocked workspace client, the bundle under test, and a context with
// cmdio output wired up.
type testAppRunner struct {
	m   *mocks.MockWorkspaceClient
	b   *bundle.Bundle
	ctx context.Context
}
// run executes the "my_app" runner against the test bundle and requires it
// to succeed.
func (ta *testAppRunner) run(t *testing.T) {
	runner := appRunner{
		key:    "my_app",
		bundle: ta.b,
		app:    ta.b.Config.Resources.Apps["my_app"],
	}

	_, err := runner.Run(ta.ctx, &Options{})
	require.NoError(t, err)
}
// setupBundle constructs a minimal bundle with a single app resource
// ("my_app"), a mocked workspace client, and a cmdio-enabled context.
// Workspace paths are resolved and translated so that the app's
// source_code_path points at the workspace files path used by deploys.
func setupBundle(t *testing.T) (context.Context, *bundle.Bundle, *mocks.MockWorkspaceClient) {
	root := t.TempDir()
	err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
	require.NoError(t, err)

	b := &bundle.Bundle{
		BundleRootPath: root,
		SyncRoot:       vfs.MustNew(root),
		Config: config.Root{
			Workspace: config.Workspace{
				RootPath: "/Workspace/Users/foo@bar.com/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"my_app": {
						App: &apps.App{
							Name: "my_app",
						},
						SourceCodePath: "./my_app",
						Config: map[string]any{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "MY_APP", "value": "my value"},
							},
						},
					},
				},
			},
		},
	}

	mwc := mocks.NewMockWorkspaceClient(t)
	b.SetWorkpaceClient(mwc.WorkspaceClient)
	// A source location is required for path translation of the app resource.
	bundletest.SetLocation(b, "resources.apps.my_app", []dyn.Location{{File: "./databricks.yml"}})

	ctx := context.Background()
	ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "..."))
	ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend))

	// Resolve default workspace paths and translate local paths so the app's
	// source code path becomes a workspace path.
	diags := bundle.Apply(ctx, b, bundle.Seq(
		mutator.DefineDefaultWorkspacePaths(),
		mutator.TranslatePaths(),
	))
	require.Empty(t, diags)

	return ctx, b, mwc
}
// setupTestApp builds on setupBundle and pre-programs the mocked Apps API:
// Get returns an app in the given app/compute states, and Deploy succeeds
// immediately via a no-op poll. Callers add further expectations (e.g. Start)
// as needed for their scenario.
func setupTestApp(t *testing.T, initialAppState apps.ApplicationState, initialComputeState apps.ComputeState) *testAppRunner {
	ctx, b, mwc := setupBundle(t)

	appApi := mwc.GetMockAppsAPI()
	appApi.EXPECT().Get(mock.Anything, apps.GetAppRequest{
		Name: "my_app",
	}).Return(&apps.App{
		Name: "my_app",
		AppStatus: &apps.ApplicationStatus{
			State: initialAppState,
		},
		ComputeStatus: &apps.ComputeStatus{
			State: initialComputeState,
		},
	}, nil)

	// Deployment wait object whose poll resolves immediately.
	wait := &apps.WaitGetDeploymentAppSucceeded[apps.AppDeployment]{
		Poll: func(_ time.Duration, _ func(*apps.AppDeployment)) (*apps.AppDeployment, error) {
			return nil, nil
		},
	}
	appApi.EXPECT().Deploy(mock.Anything, apps.CreateAppDeploymentRequest{
		AppName: "my_app",
		AppDeployment: &apps.AppDeployment{
			Mode:           apps.AppDeploymentModeSnapshot,
			SourceCodePath: "/Workspace/Users/foo@bar.com/files/my_app",
		},
	}).Return(wait, nil)

	return &testAppRunner{
		m:   mwc,
		b:   b,
		ctx: ctx,
	}
}
// TestAppRunStartedApp verifies that running an app whose compute is already
// active deploys it directly, without calling Start.
func TestAppRunStartedApp(t *testing.T) {
	r := setupTestApp(t, apps.ApplicationStateRunning, apps.ComputeStateActive)
	r.run(t)
}
// TestAppRunStoppedApp verifies that running an app whose compute is stopped
// first starts the app (compute) and then deploys it.
func TestAppRunStoppedApp(t *testing.T) {
	r := setupTestApp(t, apps.ApplicationStateCrashed, apps.ComputeStateStopped)

	appsApi := r.m.GetMockAppsAPI()
	appsApi.EXPECT().Start(mock.Anything, apps.StartAppRequest{
		Name: "my_app",
	}).Return(&apps.WaitGetAppActive[apps.App]{
		// Poll resolves immediately with a running app and active compute.
		Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
			return &apps.App{
				Name: "my_app",
				AppStatus: &apps.ApplicationStatus{
					State: apps.ApplicationStateRunning,
				},
				ComputeStatus: &apps.ComputeStatus{
					State: apps.ComputeStateActive,
				},
			}, nil
		},
	}, nil)

	r.run(t)
}
// TestAppRunWithAnActiveDeploymentInProgress verifies that when starting the
// app reveals an in-progress active deployment, the runner waits for it to
// succeed before creating the new deployment. The pending deployment here is
// cancelled, so no wait is expected for it.
func TestAppRunWithAnActiveDeploymentInProgress(t *testing.T) {
	r := setupTestApp(t, apps.ApplicationStateCrashed, apps.ComputeStateStopped)

	appsApi := r.m.GetMockAppsAPI()
	appsApi.EXPECT().Start(mock.Anything, apps.StartAppRequest{
		Name: "my_app",
	}).Return(&apps.WaitGetAppActive[apps.App]{
		Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
			return &apps.App{
				Name: "my_app",
				AppStatus: &apps.ApplicationStatus{
					State: apps.ApplicationStateRunning,
				},
				ComputeStatus: &apps.ComputeStatus{
					State: apps.ComputeStateActive,
				},
				// In-progress: the runner must wait on this deployment.
				ActiveDeployment: &apps.AppDeployment{
					DeploymentId: "active_deployment_id",
					Status: &apps.AppDeploymentStatus{
						State: apps.AppDeploymentStateInProgress,
					},
				},
				// Cancelled: no wait expected for this one.
				PendingDeployment: &apps.AppDeployment{
					DeploymentId: "pending_deployment_id",
					Status: &apps.AppDeploymentStatus{
						State: apps.AppDeploymentStateCancelled,
					},
				},
			}, nil
		},
	}, nil)

	appsApi.EXPECT().WaitGetDeploymentAppSucceeded(mock.Anything, "my_app", "active_deployment_id", mock.Anything, mock.Anything).Return(nil, nil)

	r.run(t)
}
// TestStopApp verifies that cancelling an app runner issues a Stop request
// and succeeds once the app reports it is no longer available.
func TestStopApp(t *testing.T) {
	ctx, b, mwc := setupBundle(t)

	appsApi := mwc.GetMockAppsAPI()
	appsApi.EXPECT().Stop(mock.Anything, apps.StopAppRequest{
		Name: "my_app",
	}).Return(&apps.WaitGetAppStopped[apps.App]{
		// Poll resolves immediately with a stopped (unavailable) app.
		Poll: func(_ time.Duration, _ func(*apps.App)) (*apps.App, error) {
			return &apps.App{
				Name: "my_app",
				AppStatus: &apps.ApplicationStatus{
					State: apps.ApplicationStateUnavailable,
				},
			}, nil
		},
	}, nil)

	r := appRunner{
		key:    "my_app",
		bundle: b,
		app:    b.Config.Resources.Apps["my_app"],
	}

	err := r.Cancel(ctx)
	require.NoError(t, err)
}

View File

@ -42,7 +42,7 @@ type Runner interface {
// IsRunnable returns a filter that only allows runnable resources. // IsRunnable returns a filter that only allows runnable resources.
func IsRunnable(ref refs.Reference) bool { func IsRunnable(ref refs.Reference) bool {
switch ref.Resource.(type) { switch ref.Resource.(type) {
case *resources.Job, *resources.Pipeline: case *resources.Job, *resources.Pipeline, *resources.App:
return true return true
default: default:
return false return false
@ -56,6 +56,12 @@ func ToRunner(b *bundle.Bundle, ref refs.Reference) (Runner, error) {
return &jobRunner{key: key(ref.KeyWithType), bundle: b, job: resource}, nil return &jobRunner{key: key(ref.KeyWithType), bundle: b, job: resource}, nil
case *resources.Pipeline: case *resources.Pipeline:
return &pipelineRunner{key: key(ref.KeyWithType), bundle: b, pipeline: resource}, nil return &pipelineRunner{key: key(ref.KeyWithType), bundle: b, pipeline: resource}, nil
case *resources.App:
return &appRunner{
key: key(ref.KeyWithType),
bundle: b,
app: resource,
}, nil
default: default:
return nil, fmt.Errorf("unsupported resource type: %T", resource) return nil, fmt.Errorf("unsupported resource type: %T", resource)
} }

View File

@ -59,6 +59,81 @@
"cli": { "cli": {
"bundle": { "bundle": {
"config": { "config": {
"resources.App": {
"oneOf": [
{
"type": "object",
"properties": {
"active_deployment": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeployment"
},
"app_status": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ApplicationStatus"
},
"compute_status": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeStatus"
},
"config": {
"$ref": "#/$defs/map/interface"
},
"create_time": {
"$ref": "#/$defs/string"
},
"creator": {
"$ref": "#/$defs/string"
},
"default_source_code_path": {
"$ref": "#/$defs/string"
},
"description": {
"$ref": "#/$defs/string"
},
"name": {
"$ref": "#/$defs/string"
},
"pending_deployment": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeployment"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"resources": {
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/apps.AppResource"
},
"service_principal_client_id": {
"$ref": "#/$defs/string"
},
"service_principal_id": {
"$ref": "#/$defs/int64"
},
"service_principal_name": {
"$ref": "#/$defs/string"
},
"source_code_path": {
"$ref": "#/$defs/string"
},
"update_time": {
"$ref": "#/$defs/string"
},
"updater": {
"$ref": "#/$defs/string"
},
"url": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"source_code_path",
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Cluster": { "resources.Cluster": {
"oneOf": [ "oneOf": [
{ {
@ -1273,6 +1348,9 @@
{ {
"type": "object", "type": "object",
"properties": { "properties": {
"apps": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.App"
},
"clusters": { "clusters": {
"description": "The cluster definitions for the bundle.", "description": "The cluster definitions for the bundle.",
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster", "$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster",
@ -1528,6 +1606,280 @@
}, },
"databricks-sdk-go": { "databricks-sdk-go": {
"service": { "service": {
"apps.AppDeployment": {
"oneOf": [
{
"type": "object",
"properties": {
"create_time": {
"$ref": "#/$defs/string"
},
"creator": {
"$ref": "#/$defs/string"
},
"deployment_artifacts": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentArtifacts"
},
"deployment_id": {
"$ref": "#/$defs/string"
},
"mode": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentMode"
},
"source_code_path": {
"$ref": "#/$defs/string"
},
"status": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentStatus"
},
"update_time": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppDeploymentArtifacts": {
"oneOf": [
{
"type": "object",
"properties": {
"source_code_path": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppDeploymentMode": {
"type": "string"
},
"apps.AppDeploymentState": {
"type": "string"
},
"apps.AppDeploymentStatus": {
"oneOf": [
{
"type": "object",
"properties": {
"message": {
"$ref": "#/$defs/string"
},
"state": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppDeploymentState"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResource": {
"oneOf": [
{
"type": "object",
"properties": {
"description": {
"$ref": "#/$defs/string"
},
"job": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJob"
},
"name": {
"$ref": "#/$defs/string"
},
"secret": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecret"
},
"serving_endpoint": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpoint"
},
"sql_warehouse": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouse"
}
},
"additionalProperties": false,
"required": [
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResourceJob": {
"oneOf": [
{
"type": "object",
"properties": {
"id": {
"$ref": "#/$defs/string"
},
"permission": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceJobJobPermission"
}
},
"additionalProperties": false,
"required": [
"id",
"permission"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResourceJobJobPermission": {
"type": "string"
},
"apps.AppResourceSecret": {
"oneOf": [
{
"type": "object",
"properties": {
"key": {
"$ref": "#/$defs/string"
},
"permission": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSecretSecretPermission"
},
"scope": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"key",
"permission",
"scope"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResourceSecretSecretPermission": {
"type": "string"
},
"apps.AppResourceServingEndpoint": {
"oneOf": [
{
"type": "object",
"properties": {
"name": {
"$ref": "#/$defs/string"
},
"permission": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceServingEndpointServingEndpointPermission"
}
},
"additionalProperties": false,
"required": [
"name",
"permission"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResourceServingEndpointServingEndpointPermission": {
"type": "string"
},
"apps.AppResourceSqlWarehouse": {
"oneOf": [
{
"type": "object",
"properties": {
"id": {
"$ref": "#/$defs/string"
},
"permission": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResourceSqlWarehouseSqlWarehousePermission"
}
},
"additionalProperties": false,
"required": [
"id",
"permission"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.AppResourceSqlWarehouseSqlWarehousePermission": {
"type": "string"
},
"apps.ApplicationState": {
"type": "string"
},
"apps.ApplicationStatus": {
"oneOf": [
{
"type": "object",
"properties": {
"message": {
"$ref": "#/$defs/string"
},
"state": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ApplicationState"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"apps.ComputeState": {
"type": "string"
},
"apps.ComputeStatus": {
"oneOf": [
{
"type": "object",
"properties": {
"message": {
"$ref": "#/$defs/string"
},
"state": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.ComputeState"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorCronSchedule": { "catalog.MonitorCronSchedule": {
"oneOf": [ "oneOf": [
{ {
@ -5718,6 +6070,20 @@
"cli": { "cli": {
"bundle": { "bundle": {
"config": { "config": {
"resources.App": {
"oneOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.App"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Cluster": { "resources.Cluster": {
"oneOf": [ "oneOf": [
{ {
@ -5947,6 +6313,20 @@
} }
} }
}, },
"interface": {
"oneOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/interface"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"string": { "string": {
"oneOf": [ "oneOf": [
{ {
@ -6015,6 +6395,20 @@
}, },
"databricks-sdk-go": { "databricks-sdk-go": {
"service": { "service": {
"apps.AppResource": {
"oneOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/apps.AppResource"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorMetric": { "catalog.MonitorMetric": {
"oneOf": [ "oneOf": [
{ {

View File

@ -0,0 +1,71 @@
bundle:
name: apps
workspace:
host: https://acme.cloud.databricks.com/
variables:
app_config:
type: complex
default:
command:
- "python"
- "app.py"
env:
- name: SOME_ENV_VARIABLE
value: "Some value"
resources:
apps:
my_app:
name: "my-app"
description: "My App"
source_code_path: ./app
config: ${var.app_config}
resources:
- name: "my-sql-warehouse"
sql_warehouse:
id: 1234
permission: "CAN_USE"
- name: "my-job"
job:
id: 5678
permission: "CAN_MANAGE_RUN"
permissions:
- user_name: "foo@bar.com"
level: "CAN_VIEW"
- service_principal_name: "my_sp"
level: "CAN_MANAGE"
targets:
default:
development:
variables:
app_config:
command:
- "python"
- "dev.py"
env:
- name: SOME_ENV_VARIABLE_2
value: "Some value 2"
resources:
apps:
my_app:
source_code_path: ./app-dev
resources:
- name: "my-sql-warehouse"
sql_warehouse:
id: 1234
permission: "CAN_MANAGE"
- name: "my-job"
job:
id: 5678
permission: "CAN_MANAGE"
- name: "my-secret"
secret:
key: "key"
scope: "scope"
permission: "CAN_USE"

60
bundle/tests/apps_test.go Normal file
View File

@ -0,0 +1,60 @@
package config_tests
import (
"context"
"testing"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/mutator"
"github.com/stretchr/testify/assert"
)
// TestApps verifies that an app resource defined in ./apps is loaded with its
// variables resolved into the app's config, resources, and identity fields.
func TestApps(t *testing.T) {
	root := load(t, "./apps")
	assert.Equal(t, "apps", root.Config.Bundle.Name)

	// Resolve variable references so ${var.app_config} is expanded into the app config.
	resolve := bundle.Seq(
		mutator.SetVariables(),
		mutator.ResolveVariableReferences("variables"),
	)
	diagnostics := bundle.Apply(context.Background(), root, resolve)
	assert.Empty(t, diagnostics)

	myApp := root.Config.Resources.Apps["my_app"]
	assert.Equal(t, "my-app", myApp.Name)
	assert.Equal(t, "My App", myApp.Description)
	assert.Equal(t, []any{"python", "app.py"}, myApp.Config["command"])
	assert.Equal(t, []any{map[string]any{"name": "SOME_ENV_VARIABLE", "value": "Some value"}}, myApp.Config["env"])

	assert.Len(t, myApp.Resources, 2)
	assert.Equal(t, "1234", myApp.Resources[0].SqlWarehouse.Id)
	assert.Equal(t, "CAN_USE", string(myApp.Resources[0].SqlWarehouse.Permission))
	assert.Equal(t, "5678", myApp.Resources[1].Job.Id)
	assert.Equal(t, "CAN_MANAGE_RUN", string(myApp.Resources[1].Job.Permission))
}
// TestAppsOverride verifies that target-level overrides for an app resource
// (config variable, permissions of existing resources, and an appended secret
// resource) are applied when loading the "development" target.
func TestAppsOverride(t *testing.T) {
	root := loadTarget(t, "./apps", "development")
	assert.Equal(t, "apps", root.Config.Bundle.Name)

	// Resolve variable references so the target-overridden ${var.app_config} is expanded.
	resolve := bundle.Seq(
		mutator.SetVariables(),
		mutator.ResolveVariableReferences("variables"),
	)
	diagnostics := bundle.Apply(context.Background(), root, resolve)
	assert.Empty(t, diagnostics)

	myApp := root.Config.Resources.Apps["my_app"]
	assert.Equal(t, "my-app", myApp.Name)
	assert.Equal(t, "My App", myApp.Description)
	assert.Equal(t, []any{"python", "dev.py"}, myApp.Config["command"])
	assert.Equal(t, []any{map[string]any{"name": "SOME_ENV_VARIABLE_2", "value": "Some value 2"}}, myApp.Config["env"])

	assert.Len(t, myApp.Resources, 3)
	assert.Equal(t, "1234", myApp.Resources[0].SqlWarehouse.Id)
	assert.Equal(t, "CAN_MANAGE", string(myApp.Resources[0].SqlWarehouse.Permission))
	assert.Equal(t, "5678", myApp.Resources[1].Job.Id)
	assert.Equal(t, "CAN_MANAGE", string(myApp.Resources[1].Job.Permission))
	assert.Equal(t, "key", myApp.Resources[2].Secret.Key)
	assert.Equal(t, "scope", myApp.Resources[2].Secret.Scope)
	assert.Equal(t, "CAN_USE", string(myApp.Resources[2].Secret.Permission))
}

View File

@ -47,6 +47,7 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
mutator.MergeJobParameters(), mutator.MergeJobParameters(),
mutator.MergeJobTasks(), mutator.MergeJobTasks(),
mutator.MergePipelineClusters(), mutator.MergePipelineClusters(),
mutator.MergeApps(),
)) ))
return b, diags return b, diags
} }

View File

@ -17,6 +17,7 @@ func newGenerateCommand() *cobra.Command {
cmd.AddCommand(generate.NewGenerateJobCommand()) cmd.AddCommand(generate.NewGenerateJobCommand())
cmd.AddCommand(generate.NewGeneratePipelineCommand()) cmd.AddCommand(generate.NewGeneratePipelineCommand())
cmd.AddCommand(generate.NewGenerateDashboardCommand()) cmd.AddCommand(generate.NewGenerateDashboardCommand())
cmd.AddCommand(generate.NewGenerateAppCommand())
cmd.PersistentFlags().StringVar(&key, "key", "", `resource key to use for the generated configuration`) cmd.PersistentFlags().StringVar(&key, "key", "", `resource key to use for the generated configuration`)
return cmd return cmd
} }

166
cmd/bundle/generate/app.go Normal file
View File

@ -0,0 +1,166 @@
package generate
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"path/filepath"
"github.com/databricks/cli/bundle/config/generate"
"github.com/databricks/cli/cmd/root"
"github.com/databricks/cli/libs/cmdio"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/yamlsaver"
"github.com/databricks/cli/libs/filer"
"github.com/databricks/cli/libs/textutil"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
)
// NewGenerateAppCommand returns the "bundle generate app" command.
// It looks up an existing Databricks app by name, downloads its source code
// from the workspace into a local source directory, and writes a matching
// bundle resource configuration file (<key>.app.yml) into the config directory.
func NewGenerateAppCommand() *cobra.Command {
	var configDir string
	var sourceDir string
	var appName string
	var force bool

	cmd := &cobra.Command{
		Use:   "app",
		Short: "Generate bundle configuration for a Databricks app",
	}

	cmd.Flags().StringVar(&appName, "existing-app-name", "", `App name to generate config for`)
	cmd.MarkFlagRequired("existing-app-name")

	cmd.Flags().StringVarP(&configDir, "config-dir", "d", filepath.Join("resources"), `Directory path where the output bundle config will be stored`)
	cmd.Flags().StringVarP(&sourceDir, "source-dir", "s", filepath.Join("src", "app"), `Directory path where the app files will be stored`)
	cmd.Flags().BoolVarP(&force, "force", "f", false, `Force overwrite existing files in the output directory`)

	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		ctx := cmd.Context()
		b, diags := root.MustConfigureBundle(cmd)
		if err := diags.Error(); err != nil {
			// Fix: return the error already extracted above instead of
			// calling diags.Error() a second time.
			return err
		}

		w := b.WorkspaceClient()
		cmdio.LogString(ctx, fmt.Sprintf("Loading app '%s' configuration", appName))
		app, err := w.Apps.Get(ctx, apps.GetAppRequest{Name: appName})
		if err != nil {
			return err
		}

		// Making sure the config directory and source directory are absolute paths.
		if !filepath.IsAbs(configDir) {
			configDir = filepath.Join(b.BundleRootPath, configDir)
		}

		if !filepath.IsAbs(sourceDir) {
			sourceDir = filepath.Join(b.BundleRootPath, sourceDir)
		}

		// Queue the app's workspace source tree for download; this also rewrites
		// sourceCodePath to be relative to the config directory.
		downloader := newDownloader(w, sourceDir, configDir)
		sourceCodePath := app.DefaultSourceCodePath
		err = downloader.markDirectoryForDownload(ctx, &sourceCodePath)
		if err != nil {
			return err
		}

		appConfig, err := getAppConfig(ctx, app, w)
		if err != nil {
			return fmt.Errorf("failed to get app config: %w", err)
		}

		// Making sure the source code path is relative to the config directory.
		rel, err := filepath.Rel(configDir, sourceDir)
		if err != nil {
			return err
		}

		v, err := generate.ConvertAppToValue(app, filepath.ToSlash(rel), appConfig)
		if err != nil {
			return err
		}

		// The resource key defaults to a normalized form of the app name unless
		// the user passed an explicit --key.
		appKey := cmd.Flag("key").Value.String()
		if appKey == "" {
			appKey = textutil.NormalizeString(app.Name)
		}

		result := map[string]dyn.Value{
			"resources": dyn.V(map[string]dyn.Value{
				"apps": dyn.V(map[string]dyn.Value{
					appKey: v,
				}),
			}),
		}

		// If there are app.yaml or app.yml files in the source code path, they will be downloaded but we don't want to include them in the bundle.
		// We include this configuration inline, so we need to remove these files.
		for _, configFile := range []string{"app.yml", "app.yaml"} {
			delete(downloader.files, filepath.Join(sourceDir, configFile))
		}

		err = downloader.FlushToDisk(ctx, force)
		if err != nil {
			return err
		}

		filename := filepath.Join(configDir, appKey+".app.yml")

		saver := yamlsaver.NewSaver()
		err = saver.SaveAsYAML(result, filename, force)
		if err != nil {
			return err
		}

		cmdio.LogString(ctx, "App configuration successfully saved to "+filename)
		return nil
	}

	return cmd
}
// getAppConfig reads the app's configuration from an app.yml or app.yaml file
// stored alongside the app's source code in the workspace.
// It returns (nil, nil) when no config file exists, and also when a config
// file exists but cannot be parsed (the parse failure is logged and treated
// as best-effort rather than fatal).
func getAppConfig(ctx context.Context, app *apps.App, w *databricks.WorkspaceClient) (map[string]any, error) {
	sourceCodePath := app.DefaultSourceCodePath

	f, err := filer.NewWorkspaceFilesClient(w, sourceCodePath)
	if err != nil {
		return nil, err
	}

	// The app config is stored in app.yml or app.yaml file in the source code path.
	configFileNames := []string{"app.yml", "app.yaml"}
	for _, configFile := range configFileNames {
		r, err := f.Read(ctx, configFile)
		if err != nil {
			if errors.Is(err, fs.ErrNotExist) {
				continue
			}
			return nil, err
		}

		cmdio.LogString(ctx, "Reading app configuration from "+configFile)
		content, err := io.ReadAll(r)
		// Fix: close the reader explicitly instead of deferring inside the
		// loop, so the handle is released as soon as the file is consumed.
		// The close error is intentionally ignored, matching the previous
		// deferred-close behavior.
		r.Close()
		if err != nil {
			return nil, err
		}

		var appConfig map[string]any
		err = yaml.Unmarshal(content, &appConfig)
		if err != nil {
			// Best effort: log the parse failure and continue without config
			// rather than failing the whole generation.
			cmdio.LogString(ctx, fmt.Sprintf("Failed to parse app configuration:\n%s\nerr: %v", string(content), err))
			return nil, nil
		}

		return appConfig, nil
	}

	return nil, nil
}

View File

@ -13,6 +13,7 @@ import (
"github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
"github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/databricks/databricks-sdk-go/service/workspace"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
@ -63,6 +64,37 @@ func (n *downloader) markFileForDownload(ctx context.Context, filePath *string)
return nil return nil
} }
// markDirectoryForDownload registers every file under dirPath (recursively)
// for download, skipping directory entries themselves, and then rewrites
// dirPath to the source directory's path relative to the config directory.
func (n *downloader) markDirectoryForDownload(ctx context.Context, dirPath *string) error {
	// Fail fast if the directory does not exist in the workspace.
	if _, err := n.w.Workspace.GetStatusByPath(ctx, *dirPath); err != nil {
		return err
	}

	objects, err := n.w.Workspace.RecursiveList(ctx, *dirPath)
	if err != nil {
		return err
	}

	for _, object := range objects {
		// Only files are downloaded; directories are recreated implicitly.
		if object.ObjectType == workspace.ObjectTypeDirectory {
			continue
		}

		if err := n.markFileForDownload(ctx, &object.Path); err != nil {
			return err
		}
	}

	// Rewrite the caller's path so generated config references the local source dir.
	relPath, err := filepath.Rel(n.configDir, n.sourceDir)
	if err != nil {
		return err
	}

	*dirPath = relPath
	return nil
}
func (n *downloader) markNotebookForDownload(ctx context.Context, notebookPath *string) error { func (n *downloader) markNotebookForDownload(ctx context.Context, notebookPath *string) error {
info, err := n.w.Workspace.GetStatusByPath(ctx, *notebookPath) info, err := n.w.Workspace.GetStatusByPath(ctx, *notebookPath)
if err != nil { if err != nil {

10
go.mod
View File

@ -11,7 +11,7 @@ require (
github.com/fatih/color v1.18.0 // MIT github.com/fatih/color v1.18.0 // MIT
github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/google/uuid v1.6.0 // BSD-3-Clause
github.com/hashicorp/go-version v1.7.0 // MPL 2.0 github.com/hashicorp/go-version v1.7.0 // MPL 2.0
github.com/hashicorp/hc-install v0.9.0 // MPL 2.0 github.com/hashicorp/hc-install v0.9.1 // MPL 2.0
github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0 github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0
github.com/hashicorp/terraform-json v0.23.0 // MPL 2.0 github.com/hashicorp/terraform-json v0.23.0 // MPL 2.0
github.com/hexops/gotextdiff v1.0.3 // BSD 3-Clause "New" or "Revised" License github.com/hexops/gotextdiff v1.0.3 // BSD 3-Clause "New" or "Revised" License
@ -26,9 +26,9 @@ require (
github.com/wI2L/jsondiff v0.6.1 // MIT github.com/wI2L/jsondiff v0.6.1 // MIT
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
golang.org/x/mod v0.22.0 golang.org/x/mod v0.22.0
golang.org/x/oauth2 v0.24.0 golang.org/x/oauth2 v0.25.0
golang.org/x/sync v0.10.0 golang.org/x/sync v0.10.0
golang.org/x/term v0.27.0 golang.org/x/term v0.28.0
golang.org/x/text v0.21.0 golang.org/x/text v0.21.0
gopkg.in/ini.v1 v1.67.0 // Apache 2.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0
gopkg.in/yaml.v3 v3.0.1 gopkg.in/yaml.v3 v3.0.1
@ -38,7 +38,7 @@ require (
cloud.google.com/go/auth v0.4.2 // indirect cloud.google.com/go/auth v0.4.2 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect
cloud.google.com/go/compute/metadata v0.3.0 // indirect cloud.google.com/go/compute/metadata v0.3.0 // indirect
github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e // indirect github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e // indirect
github.com/cloudflare/circl v1.3.7 // indirect github.com/cloudflare/circl v1.3.7 // indirect
@ -69,7 +69,7 @@ require (
go.opentelemetry.io/otel/trace v1.24.0 // indirect go.opentelemetry.io/otel/trace v1.24.0 // indirect
golang.org/x/crypto v0.31.0 // indirect golang.org/x/crypto v0.31.0 // indirect
golang.org/x/net v0.33.0 // indirect golang.org/x/net v0.33.0 // indirect
golang.org/x/sys v0.28.0 // indirect golang.org/x/sys v0.29.0 // indirect
golang.org/x/time v0.5.0 // indirect golang.org/x/time v0.5.0 // indirect
google.golang.org/api v0.182.0 // indirect google.golang.org/api v0.182.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect

36
go.sum generated
View File

@ -12,8 +12,8 @@ github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7r
github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg= github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk=
github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/briandowns/spinner v1.23.1 h1:t5fDPmScwUjozhDj4FA46p5acZWIPXYE30qW2Ptu650= github.com/briandowns/spinner v1.23.1 h1:t5fDPmScwUjozhDj4FA46p5acZWIPXYE30qW2Ptu650=
@ -30,8 +30,8 @@ github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vc
github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/databricks/databricks-sdk-go v0.54.0 h1:L8gsA3NXs+uYU3QtW/OUgjxMQxOH24k0MT9JhB3zLlM= github.com/databricks/databricks-sdk-go v0.54.0 h1:L8gsA3NXs+uYU3QtW/OUgjxMQxOH24k0MT9JhB3zLlM=
github.com/databricks/databricks-sdk-go v0.54.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= github.com/databricks/databricks-sdk-go v0.54.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -50,10 +50,10 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8=
github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM=
github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E=
github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@ -103,8 +103,8 @@ github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISH
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/hc-install v0.9.0 h1:2dIk8LcvANwtv3QZLckxcjyF5w8KVtiMxu6G6eLhghE= github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ=
github.com/hashicorp/hc-install v0.9.0/go.mod h1:+6vOP+mf3tuGgMApVYtmsnDoKWMDcFXeTxCACYZ8SFg= github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0=
github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ= github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ=
github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg= github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg=
github.com/hashicorp/terraform-json v0.23.0 h1:sniCkExU4iKtTADReHzACkk8fnpQXrdD2xoR+lppBkI= github.com/hashicorp/terraform-json v0.23.0 h1:sniCkExU4iKtTADReHzACkk8fnpQXrdD2xoR+lppBkI=
@ -141,8 +141,8 @@ github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDj
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY=
github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
@ -207,8 +207,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I= golang.org/x/net v0.33.0 h1:74SYHlV8BIgHIFC/LrYkOGIwL19eTYXQ5wc6TBuO36I=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE= golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -224,10 +224,10 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=

View File

@ -0,0 +1,113 @@
package bundle_test
import (
"fmt"
"io"
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/databricks/databricks-sdk-go/service/apps"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
// TestDeployBundleWithApp deploys a bundle containing a Databricks app,
// verifies the app.yml generated in the workspace, runs the app twice (once
// from its initial state and once after stopping it), and finally redeploys
// the bundle to confirm redeployment works.
func TestDeployBundleWithApp(t *testing.T) {
	ctx, wt := acc.WorkspaceTest(t)

	// TODO: should only skip app run when app can be created with no_compute option.
	if testing.Short() {
		t.Log("Skip the app creation and run in short mode")
		return
	}

	if testutil.GetCloud(t) == testutil.GCP {
		t.Skip("Skipping test for GCP cloud because /api/2.0/apps is temporarily unavailable there.")
	}

	uniqueId := uuid.New().String()
	// Fix: the previous expression was `"app-%s" + uuid...`, which embedded a
	// literal "%s" in the app name instead of formatting it.
	appId := fmt.Sprintf("app-%s", uuid.New().String()[0:8])
	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
	root := initTestTemplate(t, ctx, "apps", map[string]any{
		"unique_id":        uniqueId,
		"app_id":           appId,
		"node_type_id":     nodeTypeId,
		"spark_version":    defaultSparkVersion,
		"instance_pool_id": instancePoolId,
	})

	t.Cleanup(func() {
		destroyBundle(t, ctx, root)
		app, err := wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: "test-app"})
		if err != nil {
			require.ErrorContains(t, err, "does not exist")
		} else {
			require.Contains(t, []apps.ApplicationState{apps.ApplicationStateUnavailable}, app.AppStatus.State)
		}
	})

	deployBundle(t, ctx, root)

	// App should exist after bundle deployment
	app, err := wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)

	// Check that the job ID variable was interpolated into the uploaded app config.
	currentUser, err := wt.W.CurrentUser.Me(ctx)
	require.NoError(t, err)

	pathToAppYml := fmt.Sprintf("/Workspace/Users/%s/.bundle/%s/files/app/app.yml", currentUser.UserName, uniqueId)
	reader, err := wt.W.Workspace.Download(ctx, pathToAppYml)
	require.NoError(t, err)

	data, err := io.ReadAll(reader)
	require.NoError(t, err)

	job, err := wt.W.Jobs.GetBySettingsName(ctx, "test-job-with-cluster-"+uniqueId)
	require.NoError(t, err)

	content := string(data)
	require.Contains(t, content, fmt.Sprintf(`command:
  - flask
  - --app
  - app
  - run
env:
  - name: JOB_ID
    value: "%d"`, job.JobId))

	// Try to run the app
	_, out := runResourceWithStderr(t, ctx, root, "test_app")
	require.Contains(t, out, app.Url)

	// App should be in the running state
	app, err = wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateRunning, app.AppStatus.State)

	// Stop the app
	wait, err := wt.W.Apps.Stop(ctx, apps.StopAppRequest{Name: appId})
	require.NoError(t, err)
	app, err = wait.Get()
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateUnavailable, app.AppStatus.State)

	// Try to run the app again
	_, out = runResourceWithStderr(t, ctx, root, "test_app")
	require.Contains(t, out, app.Url)

	// App should be in the running state
	app, err = wt.W.Apps.Get(ctx, apps.GetAppRequest{Name: appId})
	require.NoError(t, err)
	require.NotNil(t, app)
	require.Equal(t, apps.ApplicationStateRunning, app.AppStatus.State)

	// Redeploy it again just to check that it can be redeployed
	deployBundle(t, ctx, root)
}

View File

@ -0,0 +1,24 @@
{
"properties": {
"unique_id": {
"type": "string",
"description": "Unique ID for job name"
},
"app_id": {
"type": "string",
"description": "Unique ID for app name"
},
"spark_version": {
"type": "string",
"description": "Spark version used for job cluster"
},
"node_type_id": {
"type": "string",
"description": "Node type id for job cluster"
},
"instance_pool_id": {
"type": "string",
"description": "Instance pool id for job cluster"
}
}
}

View File

@ -0,0 +1,15 @@
import os

from databricks.sdk import WorkspaceClient
from flask import Flask

# Module-level Flask app; the name "app" is part of the deployment contract
# (the bundle starts this with "flask --app app run"), so it must not change.
app = Flask(__name__)


@app.route("/")
def home():
    """Return the name of the job identified by the JOB_ID environment variable."""
    job_id = os.getenv("JOB_ID")
    client = WorkspaceClient()
    target_job = client.jobs.get(job_id)
    return target_job.settings.name

View File

@ -0,0 +1,42 @@
bundle:
name: basic
workspace:
root_path: "~/.bundle/{{.unique_id}}"
resources:
apps:
test_app:
name: "{{.app_id}}"
description: "App which manages job created by this bundle"
source_code_path: ./app
config:
command:
- flask
- --app
- app
- run
env:
- name: JOB_ID
value: ${resources.jobs.foo.id}
resources:
- name: "app-job"
description: "A job for app to be able to work with"
job:
id: ${resources.jobs.foo.id}
permission: "CAN_MANAGE_RUN"
jobs:
foo:
name: test-job-with-cluster-{{.unique_id}}
tasks:
- task_key: my_notebook_task
new_cluster:
num_workers: 1
spark_version: "{{.spark_version}}"
node_type_id: "{{.node_type_id}}"
data_security_mode: USER_ISOLATION
instance_pool_id: "{{.instance_pool_id}}"
spark_python_task:
python_file: ./hello_world.py

View File

@ -0,0 +1 @@
# Minimal Spark Python task payload for the integration-test job; it only needs to run successfully.
print("Hello World!")

View File

@ -119,6 +119,17 @@ func runResource(t testutil.TestingT, ctx context.Context, path, key string) (st
return stdout.String(), err return stdout.String(), err
} }
// runResourceWithStderr runs "bundle run <key>" against the bundle rooted at
// path and returns the captured stdout and stderr. The command is required to
// succeed; a non-nil error fails the test.
func runResourceWithStderr(t testutil.TestingT, ctx context.Context, path, key string) (string, string) {
	// Point the CLI at the bundle under test and give it a default cmdio context.
	ctx = env.Set(ctx, "BUNDLE_ROOT", path)
	ctx = cmdio.NewContext(ctx, cmdio.Default())

	runner := testcli.NewRunner(t, ctx, "bundle", "run", key)
	stdout, stderr, err := runner.Run()
	require.NoError(t, err)
	return stdout.String(), stderr.String()
}
func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key string, params ...string) (string, error) { func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key string, params ...string) (string, error) {
ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = env.Set(ctx, "BUNDLE_ROOT", path)
ctx = cmdio.NewContext(ctx, cmdio.Default()) ctx = cmdio.NewContext(ctx, cmdio.Default())

View File

@ -7,7 +7,7 @@ type elementsByKey struct {
keyFunc func(dyn.Value) string keyFunc func(dyn.Value) string
} }
func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) { func (e elementsByKey) doMap(_ dyn.Path, v dyn.Value, mergeFunc func(a, b dyn.Value) (dyn.Value, error)) (dyn.Value, error) {
// We know the type of this value is a sequence. // We know the type of this value is a sequence.
// For additional defence, return self if it is not. // For additional defence, return self if it is not.
elements, ok := v.AsSequence() elements, ok := v.AsSequence()
@ -33,7 +33,7 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
} }
// Merge this instance into the reference. // Merge this instance into the reference.
nv, err := Merge(ref, elements[i]) nv, err := mergeFunc(ref, elements[i])
if err != nil { if err != nil {
return v, err return v, err
} }
@ -55,6 +55,26 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
return dyn.NewValue(out, v.Locations()), nil return dyn.NewValue(out, v.Locations()), nil
} }
// Map groups the sequence's elements by key and combines same-key elements
// using the package's Merge function.
func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
	return e.doMap(nil, v, Merge)
}
// MapWithOverride groups the sequence's elements by key and combines same-key
// elements with override semantics: the later element wins outright instead of
// being deep-merged with the earlier one.
func (e elementsByKey) MapWithOverride(p dyn.Path, v dyn.Value) (dyn.Value, error) {
	return e.doMap(nil, v, func(a, b dyn.Value) (dyn.Value, error) {
		return Override(a, b, OverrideVisitor{
			// Accept values present only in the overriding element as-is.
			VisitInsert: func(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
				return v, nil
			},
			// Returning nil permits removal of values absent from the
			// overriding element (see TestElementByKeyWithOverride: fields of
			// the earlier element that the later one lacks are dropped).
			VisitDelete: func(valuePath dyn.Path, left dyn.Value) error {
				return nil
			},
			// On conflict, the overriding (right-hand) value wins.
			VisitUpdate: func(_ dyn.Path, a, b dyn.Value) (dyn.Value, error) {
				return b, nil
			},
		})
	})
}
// ElementsByKey returns a [dyn.MapFunc] that operates on a sequence // ElementsByKey returns a [dyn.MapFunc] that operates on a sequence
// where each element is a map. It groups elements by a key and merges // where each element is a map. It groups elements by a key and merges
// elements with the same key. // elements with the same key.
@ -65,3 +85,7 @@ func (e elementsByKey) Map(_ dyn.Path, v dyn.Value) (dyn.Value, error) {
func ElementsByKey(key string, keyFunc func(dyn.Value) string) dyn.MapFunc { func ElementsByKey(key string, keyFunc func(dyn.Value) string) dyn.MapFunc {
return elementsByKey{key, keyFunc}.Map return elementsByKey{key, keyFunc}.Map
} }
// ElementsByKeyWithOverride returns a [dyn.MapFunc] that operates on a
// sequence where each element is a map. Like ElementsByKey it groups elements
// by key, but same-key elements are combined with override semantics (the
// later element replaces the earlier one) rather than deep-merged.
func ElementsByKeyWithOverride(key string, keyFunc func(dyn.Value) string) dyn.MapFunc {
	return elementsByKey{key, keyFunc}.MapWithOverride
}

View File

@ -50,3 +50,42 @@ func TestElementByKey(t *testing.T) {
}, },
) )
} }
// TestElementByKeyWithOverride verifies override semantics when grouping by
// key: "foo" appears twice, and the final occurrence ({key, othervalue}) wins
// outright — the earlier "value" field is dropped rather than merged in.
func TestElementByKeyWithOverride(t *testing.T) {
	input := dyn.V([]dyn.Value{
		dyn.V(map[string]dyn.Value{
			"key":   dyn.V("foo"),
			"value": dyn.V(42),
		}),
		dyn.V(map[string]dyn.Value{
			"key":   dyn.V("bar"),
			"value": dyn.V(43),
		}),
		dyn.V(map[string]dyn.Value{
			"key":        dyn.V("foo"),
			"othervalue": dyn.V(44),
		}),
	})

	// Group elements case-insensitively on their "key" field.
	lowerKey := func(v dyn.Value) string {
		return strings.ToLower(v.MustString())
	}

	output, err := dyn.MapByPath(input, dyn.EmptyPath, ElementsByKeyWithOverride("key", lowerKey))
	require.NoError(t, err)
	assert.Len(t, output.MustSequence(), 2)

	// First slot: the merged "foo" element, fully replaced by its last occurrence.
	assert.Equal(t,
		output.Index(0).AsAny(),
		map[string]any{
			"key":        "foo",
			"othervalue": 44,
		},
	)
	// Second slot: "bar" appeared once and is untouched.
	assert.Equal(t,
		output.Index(1).AsAny(),
		map[string]any{
			"key":   "bar",
			"value": 43,
		},
	)
}

View File

@ -185,8 +185,6 @@ func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.
u.DisplayName, u.DisplayName,
u.UserName, u.UserName,
iamutil.GetShortUserName(&u), iamutil.GetShortUserName(&u),
u.Name.FamilyName,
u.Name.GivenName,
} }
if u.Name != nil { if u.Name != nil {
names = append(names, u.Name.FamilyName) names = append(names, u.Name.FamilyName)