databricks-cli/bundle/schema/jsonschema.json

Make bundle JSON schema modular with `$defs` (#1700)

## Changes

This PR makes sweeping changes to the way we generate and test the bundle JSON schema. The main benefits are:

1. A more modular JSON schema. Every definition in the schema is now one level deep and points to references instead of inlining the entire schema for a field. This unblocks PyDABs from taking a dependency on the JSON schema.
2. The JSON schema is generated during CLI code generation and streamed directly, instead of being computed at runtime whenever a user calls `databricks bundle schema`. This is nice because we no longer need to embed a partial OpenAPI spec in the CLI. Down the line, we can add a `Schema()` method to every struct in the Databricks Go SDK and remove the dependency on the OpenAPI spec altogether. This will become more important once we decouple Go SDK structs and methods from the underlying APIs.
3. Enum values for Go SDK fields are included in the JSON schema, giving better autocompletion and validation for these fields. As a follow-up, we can add enum values for non-Go-SDK enums as well (created an internal ticket to track).
4. "packageName.structName" is used as the key to read JSON schemas from the OpenAPI spec for Go SDK structs. Before, we used an unrolled representation of the JSON schema (stored in `bundle_descriptions.json`), which was complex to parse and include in the final JSON schema output. This also means loading values from the OpenAPI spec for the `target` schema works automatically and no longer needs custom code.
5. Recursive types (e.g. `for_each_task`) are supported. With `$ref`s used everywhere, this is trivial to support.
6. Using complex variables was invalid according to the schema generated before this PR. That bug is now fixed. Adding more custom rules will also be easier in the future thanks to the single-level nature of the JSON schema.

Since this is a complete change of approach in how we generate the JSON schema, there are a few (very minor) regressions worth calling out:

1. We lose a few custom descriptions for non-Go-SDK structs that were part of `bundle_descriptions.json`. Support for those can be added as a follow-up.
2. Since the final JSON schema is now a static artefact, we lose some lead time on the signal that JSON schema integration tests are failing. That's okay, though, since we have a lot of coverage via the existing unit tests.

## Tests

Unit tests. End-to-end tests are being added in https://github.com/databricks/cli/pull/1726. The previous unit tests were all deleted because they were bloated; effort was made to make the new unit tests provide (almost) equivalent coverage.

2024-09-10 13:55:18 +00:00
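To make the variable-interpolation convention concrete before the schema itself: every primitive definition in `$defs` accepts either a native literal or a `${...}` reference string. Below is a minimal, hypothetical fragment in the same style, reduced to only the `var` alternative (the real definitions also accept `resources`, `bundle`, `workspace`, and `artifacts` references); the `num_workers` property and the `cluster_size` variable name are invented for illustration.

```json
{
  "$defs": {
    "int": {
      "anyOf": [
        { "type": "integer" },
        {
          "type": "string",
          "pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
        }
      ]
    }
  },
  "type": "object",
  "properties": {
    "num_workers": { "$ref": "#/$defs/int" }
  }
}
```

Against this fragment, both `{"num_workers": 4}` and `{"num_workers": "${var.cluster_size}"}` validate; this is how the generated schema lets any primitive field carry either a literal value or a variable reference.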
{
"$defs": {
"bool": {
"anyOf": [
{
"type": "boolean"
},
{
"type": "string",
"pattern": "\\$\\{(resources(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(bundle(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(workspace(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(artifacts(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"float64": {
"anyOf": [
{
"type": "number"
},
{
"type": "string",
"pattern": "\\$\\{(resources(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(bundle(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(workspace(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(artifacts(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"github.com": {
"databricks": {
"cli": {
"bundle": {
"config": {
"resources.Cluster": {
"anyOf": [
{
"type": "object",
"properties": {
"apply_policy_default_values": {
"description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.",
"$ref": "#/$defs/bool"
},
"autoscale": {
"description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale"
},
"autotermination_minutes": {
"description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.",
"$ref": "#/$defs/int"
},
"aws_attributes": {
"description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes"
},
"azure_attributes": {
"description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes"
},
"cluster_log_conf": {
"description": "The configuration for delivering spark logs to a long-term storage destination.\nTwo kinds of destinations (dbfs and s3) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf"
},
"cluster_name": {
"description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\n",
"$ref": "#/$defs/string"
},
"custom_tags": {
"description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
"$ref": "#/$defs/map/string"
},
"data_security_mode": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode"
},
"docker_image": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage"
},
"driver_instance_pool_id": {
"description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
"$ref": "#/$defs/string"
},
"driver_node_type_id": {
"description": "The node type of the Spark driver. Note that this field is optional;\nif unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n",
"$ref": "#/$defs/string"
},
"enable_elastic_disk": {
"description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space. This feature requires specific AWS\npermissions to function correctly - refer to the User Guide for more details.",
"$ref": "#/$defs/bool"
},
"enable_local_disk_encryption": {
"description": "Whether to enable LUKS on cluster VMs' local disks",
"$ref": "#/$defs/bool"
},
"gcp_attributes": {
"description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes"
},
"init_scripts": {
"description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo"
},
"instance_pool_id": {
"description": "The optional ID of the instance pool to which the cluster belongs.",
"$ref": "#/$defs/string"
},
"node_type_id": {
"description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.\n",
"$ref": "#/$defs/string"
},
"num_workers": {
"description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
"$ref": "#/$defs/int"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"policy_id": {
"description": "The ID of the cluster policy used to create the cluster if applicable.",
"$ref": "#/$defs/string"
},
"runtime_engine": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine"
},
"single_user_name": {
"description": "Single user name if data_security_mode is `SINGLE_USER`",
"$ref": "#/$defs/string"
},
"spark_conf": {
"description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.\n",
"$ref": "#/$defs/map/string"
},
"spark_env_vars": {
"description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
"$ref": "#/$defs/map/string"
},
"spark_version": {
"description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.\n",
"$ref": "#/$defs/string"
},
"ssh_public_keys": {
"description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
"$ref": "#/$defs/slice/string"
},
"workload_type": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Dashboard": {
"anyOf": [
{
"type": "object",
"properties": {
"display_name": {
"description": "The display name of the dashboard.",
"$ref": "#/$defs/string"
},
"embed_credentials": {
"$ref": "#/$defs/bool"
},
"file_path": {
"$ref": "#/$defs/string"
},
"parent_path": {
"description": "The workspace path of the folder containing the dashboard. Includes leading slash and no\ntrailing slash.\nThis field is excluded in List Dashboards responses.",
"$ref": "#/$defs/string"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"serialized_dashboard": {
"description": "The contents of the dashboard in serialized string form.\nThis field is excluded in List Dashboards responses.\nUse the [get dashboard API](https://docs.databricks.com/api/workspace/lakeview/get)\nto retrieve an example response, which includes the `serialized_dashboard` field.\nThis field provides the structure of the JSON string that represents the dashboard's\nlayout and components.",
"$ref": "#/$defs/interface"
},
"warehouse_id": {
"description": "The warehouse ID used to run the dashboard.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"display_name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Grant": {
"anyOf": [
{
"type": "object",
"properties": {
"principal": {
"$ref": "#/$defs/string"
},
"privileges": {
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false,
"required": [
"privileges",
"principal"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Job": {
"anyOf": [
{
"type": "object",
"properties": {
"budget_policy_id": {
"description": "The id of the user specified budget policy to use for this job.\nIf not specified, a default budget policy may be applied when creating or modifying the job.\nSee `effective_budget_policy_id` for the budget policy used by this workload.",
"$ref": "#/$defs/string"
},
"continuous": {
"description": "An optional continuous property for this job. The continuous property will ensure that there is always one run executing. Only one of `schedule` and `continuous` can be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Continuous"
},
"description": {
"description": "An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.",
"$ref": "#/$defs/string"
},
"email_notifications": {
"description": "An optional set of email addresses that is notified when runs of this job begin or complete as well as when this job is deleted.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEmailNotifications"
},
"environments": {
"description": "A list of task execution environment specifications that can be referenced by serverless tasks of this job.\nAn environment is required to be present for serverless tasks.\nFor serverless notebook tasks, the environment is accessible in the notebook environment panel.\nFor other serverless tasks, the task environment is required to be specified using environment_key in the task settings.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment"
},
"git_source": {
"description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitSource"
},
"health": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules"
},
"job_clusters": {
"description": "A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster"
},
"max_concurrent_runs": {
"description": "An optional maximum allowed number of concurrent runs of the job.\nSet this value if you want to be able to execute multiple runs of the same job concurrently.\nThis is useful for example if you trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each other, or if you want to trigger multiple runs which differ by their input parameters.\nThis setting affects only new runs. For example, suppose the jobs concurrency is 4 and there are 4 concurrent active runs. Then setting the concurrency to 3 wont kill any of the active runs.\nHowever, from then on, new runs are skipped unless there are fewer than 3 active runs.\nThis value cannot exceed 1000. Setting this value to `0` causes all new runs to be skipped.",
"$ref": "#/$defs/int"
},
"name": {
"description": "An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.",
"$ref": "#/$defs/string"
},
"notification_settings": {
"description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this job.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobNotificationSettings"
},
"parameters": {
"description": "Job-level parameter definitions",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"queue": {
"description": "The queue settings of the job.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.QueueSettings"
},
"run_as": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
},
"schedule": {
"description": "An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.CronSchedule"
},
"tags": {
"description": "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added to the job.",
"$ref": "#/$defs/map/string"
},
"tasks": {
"description": "A list of task specifications to be executed by this job.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Task"
},
"timeout_seconds": {
"description": "An optional timeout applied to each run of this job. A value of `0` means no timeout.",
"$ref": "#/$defs/int"
},
"trigger": {
"description": "A configuration to trigger a run when certain conditions are met. The default behavior is that the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TriggerSettings"
},
"webhook_notifications": {
"description": "A collection of system notification IDs to notify when runs of this job begin or complete.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.MlflowExperiment": {
"anyOf": [
{
"type": "object",
"properties": {
"artifact_location": {
"description": "Location where artifacts for the experiment are stored.",
"$ref": "#/$defs/string"
},
"creation_time": {
"description": "Creation time",
"$ref": "#/$defs/int64"
},
"experiment_id": {
"description": "Unique identifier for the experiment.",
"$ref": "#/$defs/string"
},
"last_update_time": {
"description": "Last update time",
"$ref": "#/$defs/int64"
},
"lifecycle_stage": {
"description": "Current life cycle stage of the experiment: \"active\" or \"deleted\".\nDeleted experiments are not returned by APIs.",
"$ref": "#/$defs/string"
},
"name": {
"description": "Human readable name that identifies the experiment.",
"$ref": "#/$defs/string"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"tags": {
"description": "Tags: Additional metadata key-value pairs.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.MlflowModel": {
"anyOf": [
{
"type": "object",
"properties": {
"creation_timestamp": {
"description": "Timestamp recorded when this `registered_model` was created.",
"$ref": "#/$defs/int64"
},
"description": {
"description": "Description of this `registered_model`.",
"$ref": "#/$defs/string"
},
"last_updated_timestamp": {
"description": "Timestamp recorded when metadata for this `registered_model` was last updated.",
"$ref": "#/$defs/int64"
},
"latest_versions": {
"description": "Collection of latest model versions for each stage.\nOnly contains models with current `READY` status.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelVersion"
},
"name": {
"description": "Unique name for the model.",
"$ref": "#/$defs/string"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"tags": {
"description": "Tags: Additional metadata key-value pairs for this `registered_model`.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelTag"
},
"user_id": {
"description": "User that created this `registered_model`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.ModelServingEndpoint": {
"anyOf": [
{
"type": "object",
"properties": {
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
2024-10-07 13:21:05 +00:00
"ai_gateway": {
"description": "The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are supported as of now.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayConfig"
},
"config": {
"description": "The core config of the serving endpoint.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointCoreConfigInput"
},
"name": {
"description": "The name of the serving endpoint. This field is required and must be unique across a Databricks workspace.\nAn endpoint name can consist of alphanumeric characters, dashes, and underscores.\n",
"$ref": "#/$defs/string"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"rate_limits": {
"description": "Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI Gateway to manage rate limits.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.RateLimit"
},
"route_optimized": {
"description": "Enable route optimization for the serving endpoint.",
"$ref": "#/$defs/bool"
},
"tags": {
"description": "Tags to be attached to the serving endpoint and automatically propagated to billing logs.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag"
}
},
"additionalProperties": false,
"required": [
"config",
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Permission": {
"anyOf": [
{
"type": "object",
"properties": {
"group_name": {
"$ref": "#/$defs/string"
},
"level": {
"$ref": "#/$defs/string"
},
"service_principal_name": {
"$ref": "#/$defs/string"
},
"user_name": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"level"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Pipeline": {
"anyOf": [
{
"type": "object",
"properties": {
"budget_policy_id": {
"description": "Budget policy of this pipeline.",
"$ref": "#/$defs/string"
},
"catalog": {
"description": "A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables in this pipeline are published to a `target` schema inside `catalog` (for example, `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.",
"$ref": "#/$defs/string"
},
"channel": {
"description": "DLT Release Channel that specifies which version to use.",
"$ref": "#/$defs/string"
},
"clusters": {
"description": "Cluster settings for this pipeline deployment.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster"
},
"configuration": {
"description": "String-String configuration for this pipeline execution.",
"$ref": "#/$defs/map/string"
},
"continuous": {
"description": "Whether the pipeline is continuous or triggered. This replaces `trigger`.",
"$ref": "#/$defs/bool"
},
"deployment": {
"description": "Deployment type of this pipeline.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineDeployment"
},
"development": {
"description": "Whether the pipeline is in Development mode. Defaults to false.",
"$ref": "#/$defs/bool"
},
"edition": {
"description": "Pipeline product edition.",
"$ref": "#/$defs/string"
},
"filters": {
"description": "Filters on which Pipeline packages to include in the deployed graph.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Filters"
},
"gateway_definition": {
"description": "The definition of a gateway pipeline to support CDC.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionGatewayPipelineDefinition"
},
"id": {
"description": "Unique identifier for this pipeline.",
"$ref": "#/$defs/string"
},
"ingestion_definition": {
"description": "The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionPipelineDefinition"
},
"libraries": {
"description": "Libraries or code needed by this deployment.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary"
},
"name": {
"description": "Friendly identifier for this pipeline.",
"$ref": "#/$defs/string"
},
"notifications": {
"description": "List of notification settings for this pipeline.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"photon": {
"description": "Whether Photon is enabled for this pipeline.",
"$ref": "#/$defs/bool"
},
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateRepoRequest">workspace.CreateRepoRequest</a>.</li> <li>[Breaking] Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateRepoResponse">workspace.CreateRepoResponse</a>.</li> <li>[Breaking] Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <code>any</code>.</li> <li>Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetRepoResponse">workspace.GetRepoResponse</a>.</li> <li>Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <code>any</code>.</li> <li>[Breaking] Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service . 
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateRepoRequest">workspace.UpdateRepoRequest</a>.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>CredentialId</code> and <code>GitProvider</code> fields for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateCredentialsResponse">workspace.CreateCredentialsResponse</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CredentialInfo">workspace.CredentialInfo</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetCredentialsResponse">workspace.GetCredentialsResponse</a> to be required.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckout">workspace.SparseCheckout</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckoutUpdate">workspace.SparseCheckoutUpdate</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>[Breaking] Changed <code>GitProvider</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateCredentialsRequest">workspace.UpdateCredentialsRequest</a> to be required.</li> </ul> <p>OpenAPI SHA: 0c86ea6dbd9a730c24ff0d4e509603e476955ac5, Date: 2024-10-02</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/80c963696b3c68e0b8aa4dc7dd55e16862a777c5"><code>80c9636</code></a> [Release] Release v0.48.0 (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1058">#1058</a>)</li> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/6cecc224cbf7ceb85ca160d22fd27073899c2c40"><code>6cecc22</code></a> [Internal] Update SDK to latest OpenAPI spec (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1057">#1057</a>)</li> <li>See full diff in <a href="https://github.com/databricks/databricks-sdk-go/compare/v0.47.0...v0.48.0">compare view</a></li> </ul> </details> <br /> <details> <summary>Most Recent Ignore Conditions Applied to This Pull Request</summary> | Dependency Name | Ignore Conditions | | --- | --- | | github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] | </details> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.47.0&new-version=0.48.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
2024-10-07 13:21:05 +00:00
"schema": {
"description": "The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.",
"$ref": "#/$defs/string"
},
"serverless": {
"description": "Whether serverless compute is enabled for this pipeline.",
"$ref": "#/$defs/bool"
},
"storage": {
"description": "DBFS root directory for storing checkpoints and tables.",
"$ref": "#/$defs/string"
},
"target": {
"description": "Target schema (database) to add tables in this pipeline to. If not specified, no data is published to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.",
"$ref": "#/$defs/string"
},
"trigger": {
"description": "Which pipeline trigger to use. Deprecated: Use `continuous` instead.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineTrigger"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.QualityMonitor": {
"anyOf": [
{
"type": "object",
"properties": {
"assets_dir": {
"description": "The directory to store monitoring assets (e.g. dashboard, metric tables).",
"$ref": "#/$defs/string"
},
"baseline_table_name": {
"description": "Name of the baseline table from which drift metrics are computed from.\nColumns in the monitored table should also be present in the baseline table.\n",
"$ref": "#/$defs/string"
},
"custom_metrics": {
"description": "Custom metrics to compute on the monitored table. These can be aggregate metrics, derived\nmetrics (from already computed aggregate metrics), or drift metrics (comparing metrics across time\nwindows).\n",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric"
},
"data_classification_config": {
"description": "The data classification config for the monitor.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDataClassificationConfig"
},
"inference_log": {
"description": "Configuration for monitoring inference logs.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog"
},
"notifications": {
"description": "The notification settings for the monitor.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorNotifications"
},
"output_schema_name": {
"description": "Schema where output metric tables are created.",
"$ref": "#/$defs/string"
},
"schedule": {
"description": "The schedule for automatically updating and refreshing metric tables.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedule"
},
"skip_builtin_dashboard": {
"description": "Whether to skip creating a default dashboard summarizing data quality metrics.",
"$ref": "#/$defs/bool"
},
"slicing_exprs": {
"description": "List of column expressions to slice data with for targeted analysis. The data is grouped by\neach expression independently, resulting in a separate slice for each predicate and its\ncomplements. For high-cardinality columns, only the top 100 unique values by frequency will\ngenerate slices.\n",
"$ref": "#/$defs/slice/string"
},
"snapshot": {
"description": "Configuration for monitoring snapshot tables.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorSnapshot"
},
"time_series": {
"description": "Configuration for monitoring time series tables.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries"
},
"warehouse_id": {
"description": "Optional argument to specify the warehouse for dashboard creation. If not specified, the first running\nwarehouse will be used.\n",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"assets_dir",
"output_schema_name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.RegisteredModel": {
"anyOf": [
{
"type": "object",
"properties": {
"catalog_name": {
"description": "The name of the catalog where the schema and the registered model reside",
"$ref": "#/$defs/string"
},
"comment": {
"description": "The comment attached to the registered model",
"$ref": "#/$defs/string"
},
"grants": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Grant"
},
"name": {
"description": "The name of the registered model",
"$ref": "#/$defs/string"
},
"schema_name": {
"description": "The name of the schema where the registered model resides",
"$ref": "#/$defs/string"
},
"storage_location": {
"description": "The storage location on the cloud under which model version data files are stored",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"catalog_name",
"name",
"schema_name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Schema": {
"anyOf": [
{
"type": "object",
"properties": {
"catalog_name": {
"description": "Name of parent catalog.",
"$ref": "#/$defs/string"
},
"comment": {
"description": "User-provided free-form text description.",
"$ref": "#/$defs/string"
},
"grants": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Grant"
},
"name": {
"description": "Name of schema, relative to parent catalog.",
"$ref": "#/$defs/string"
},
"properties": {
"$ref": "#/$defs/map/string"
},
"storage_root": {
"description": "Storage root URL for managed tables within schema.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"catalog_name",
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"variable.Lookup": {
"anyOf": [
{
"type": "object",
"properties": {
"alert": {
"$ref": "#/$defs/string"
},
"cluster": {
"$ref": "#/$defs/string"
},
"cluster_policy": {
"$ref": "#/$defs/string"
},
"dashboard": {
"$ref": "#/$defs/string"
},
"instance_pool": {
"$ref": "#/$defs/string"
},
"job": {
"$ref": "#/$defs/string"
},
"metastore": {
"$ref": "#/$defs/string"
},
"pipeline": {
"$ref": "#/$defs/string"
},
"query": {
"$ref": "#/$defs/string"
},
"service_principal": {
"$ref": "#/$defs/string"
},
"warehouse": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"variable.TargetVariable": {
"anyOf": [
{
"type": "object",
"properties": {
"default": {
"$ref": "#/$defs/interface"
},
"description": {
"$ref": "#/$defs/string"
},
"lookup": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup"
},
"type": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType"
}
},
"additionalProperties": false
},
{}
]
},
"variable.Variable": {
"type": "object",
"properties": {
"default": {
"$ref": "#/$defs/interface"
},
"description": {
"$ref": "#/$defs/string"
},
"lookup": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Lookup"
},
"type": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.VariableType"
}
},
"additionalProperties": false
},
"variable.VariableType": {
"type": "string"
}
},
"config.Artifact": {
"anyOf": [
{
"type": "object",
"properties": {
"build": {
"$ref": "#/$defs/string"
},
"executable": {
"$ref": "#/$defs/github.com/databricks/cli/libs/exec.ExecutableType"
},
"files": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config.ArtifactFile"
},
"path": {
"$ref": "#/$defs/string"
},
"type": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactType"
}
},
"additionalProperties": false,
"required": [
"type"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.ArtifactFile": {
"anyOf": [
{
"type": "object",
"properties": {
"source": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"source"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.ArtifactType": {
"type": "string"
},
"config.Bundle": {
"anyOf": [
{
"type": "object",
"properties": {
"cluster_id": {
"$ref": "#/$defs/string"
},
"compute_id": {
"$ref": "#/$defs/string"
},
"databricks_cli_version": {
"$ref": "#/$defs/string"
},
"deployment": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Deployment"
},
"git": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git"
},
"name": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Command": {
"type": "string"
},
"config.Deployment": {
"anyOf": [
{
"type": "object",
"properties": {
"fail_on_active_runs": {
"$ref": "#/$defs/bool"
},
"lock": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Lock"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Experimental": {
"anyOf": [
{
"type": "object",
"properties": {
"pydabs": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.PyDABs"
},
"python_wheel_wrapper": {
"$ref": "#/$defs/bool"
},
"scripts": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Command"
},
"use_legacy_run_as": {
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Git": {
"anyOf": [
{
"type": "object",
"properties": {
"branch": {
"$ref": "#/$defs/string"
},
"origin_url": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Lock": {
"anyOf": [
{
"type": "object",
"properties": {
"enabled": {
"$ref": "#/$defs/bool"
},
"force": {
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Mode": {
"type": "string"
},
"config.Presets": {
"anyOf": [
{
"type": "object",
"properties": {
"jobs_max_concurrent_runs": {
"$ref": "#/$defs/int"
},
"name_prefix": {
"$ref": "#/$defs/string"
},
"pipelines_development": {
"$ref": "#/$defs/bool"
},
"tags": {
"$ref": "#/$defs/map/string"
},
"trigger_pause_status": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.PyDABs": {
"anyOf": [
{
"type": "object",
"properties": {
"enabled": {
"$ref": "#/$defs/bool"
},
"import": {
"$ref": "#/$defs/slice/string"
},
"venv_path": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Resources": {
"anyOf": [
{
"type": "object",
"properties": {
"clusters": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Cluster"
},
"dashboards": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Dashboard"
},
"experiments": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowExperiment"
},
"jobs": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Job"
},
"model_serving_endpoints": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint"
},
"models": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.MlflowModel"
},
"pipelines": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Pipeline"
},
"quality_monitors": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.QualityMonitor"
},
"registered_models": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.RegisteredModel"
},
"schemas": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/resources.Schema"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Sync": {
"anyOf": [
{
"type": "object",
"properties": {
"exclude": {
"$ref": "#/$defs/slice/string"
},
"include": {
"$ref": "#/$defs/slice/string"
},
"paths": {
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Target": {
"anyOf": [
{
"type": "object",
"properties": {
"artifacts": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
},
"bundle": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle"
},
"cluster_id": {
"$ref": "#/$defs/string"
},
"compute_id": {
"$ref": "#/$defs/string"
},
"default": {
"$ref": "#/$defs/bool"
},
"git": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Git"
},
"mode": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Mode"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"presets": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets"
},
"resources": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources"
},
"run_as": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
},
"sync": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync"
},
"variables": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.TargetVariable"
},
"workspace": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Workspace": {
"anyOf": [
{
"type": "object",
"properties": {
"artifact_path": {
"$ref": "#/$defs/string"
},
"auth_type": {
"$ref": "#/$defs/string"
},
"azure_client_id": {
"$ref": "#/$defs/string"
},
"azure_environment": {
"$ref": "#/$defs/string"
},
"azure_login_app_id": {
"$ref": "#/$defs/string"
},
"azure_tenant_id": {
"$ref": "#/$defs/string"
},
"azure_use_msi": {
"$ref": "#/$defs/bool"
},
"azure_workspace_resource_id": {
"$ref": "#/$defs/string"
},
"client_id": {
"$ref": "#/$defs/string"
},
"file_path": {
"$ref": "#/$defs/string"
},
"google_service_account": {
"$ref": "#/$defs/string"
},
"host": {
"$ref": "#/$defs/string"
},
"profile": {
"$ref": "#/$defs/string"
},
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateRepoRequest">workspace.UpdateRepoRequest</a>.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>CredentialId</code> and <code>GitProvider</code> fields for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateCredentialsResponse">workspace.CreateCredentialsResponse</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CredentialInfo">workspace.CredentialInfo</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetCredentialsResponse">workspace.GetCredentialsResponse</a> to be required.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckout">workspace.SparseCheckout</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckoutUpdate">workspace.SparseCheckoutUpdate</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>[Breaking] Changed <code>GitProvider</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateCredentialsRequest">workspace.UpdateCredentialsRequest</a> to be required.</li> </ul> <p>OpenAPI SHA: 0c86ea6dbd9a730c24ff0d4e509603e476955ac5, Date: 2024-10-02</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/80c963696b3c68e0b8aa4dc7dd55e16862a777c5"><code>80c9636</code></a> [Release] Release v0.48.0 (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1058">#1058</a>)</li> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/6cecc224cbf7ceb85ca160d22fd27073899c2c40"><code>6cecc22</code></a> [Internal] Update SDK to latest OpenAPI spec (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1057">#1057</a>)</li> <li>See full diff in <a href="https://github.com/databricks/databricks-sdk-go/compare/v0.47.0...v0.48.0">compare view</a></li> </ul> </details> <br /> <details> <summary>Most Recent Ignore Conditions Applied to This Pull Request</summary> | Dependency Name | Ignore Conditions | | --- | --- | | github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] | </details> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.47.0&new-version=0.48.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
2024-10-07 13:21:05 +00:00
"resource_path": {
"$ref": "#/$defs/string"
},
"root_path": {
"$ref": "#/$defs/string"
},
"state_path": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
},
"libs": {
"exec.ExecutableType": {
"type": "string"
}
}
},
"databricks-sdk-go": {
"service": {
"catalog.MonitorCronSchedule": {
"anyOf": [
{
"type": "object",
"properties": {
"pause_status": {
"description": "Read only field that indicates whether a schedule is paused or not.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorCronSchedulePauseStatus",
"enum": [
"UNPAUSED",
"PAUSED"
]
},
"quartz_cron_expression": {
"description": "The expression that determines when to run the monitor. See [examples](https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html).\n",
"$ref": "#/$defs/string"
},
"timezone_id": {
"description": "The timezone id (e.g., ``\"PST\"``) in which to evaluate the quartz expression.\n",
"$ref": "#/$defs/string"
}
},
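"$comment": "Illustrative example only; the cron expression and timezone are hypothetical values, not part of the generated spec. Runs the monitor daily at midnight UTC.",
"examples": [
  {
    "quartz_cron_expression": "0 0 0 * * ?",
    "timezone_id": "UTC"
  }
],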
"additionalProperties": false,
"required": [
"quartz_cron_expression",
"timezone_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorCronSchedulePauseStatus": {
"type": "string"
},
"catalog.MonitorDataClassificationConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"enabled": {
"description": "Whether data classification is enabled.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorDestination": {
"anyOf": [
{
"type": "object",
"properties": {
"email_addresses": {
"description": "The list of email addresses to send the notification to. A maximum of 5 email addresses is supported.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorInferenceLog": {
"anyOf": [
{
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"$ref": "#/$defs/slice/string"
},
"label_col": {
"description": "Optional column that contains the ground truth for the prediction.",
"$ref": "#/$defs/string"
},
"model_id_col": {
"description": "Column that contains the id of the model generating the predictions. Metrics will be computed per model id by\ndefault, and also across all model ids.\n",
"$ref": "#/$defs/string"
},
"prediction_col": {
"description": "Column that contains the output/prediction from the model.",
"$ref": "#/$defs/string"
},
"prediction_proba_col": {
"description": "Optional column that contains the prediction probabilities for each class in a classification problem type.\nThe values in this column should be a map, mapping each class label to the prediction probability for a given\nsample. The map should be of PySpark MapType().\n",
"$ref": "#/$defs/string"
},
"problem_type": {
"description": "Problem type the model aims to solve. Determines the type of model-quality metrics that will be computed.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLogProblemType",
"enum": [
"PROBLEM_TYPE_CLASSIFICATION",
"PROBLEM_TYPE_REGRESSION"
]
},
"timestamp_col": {
"description": "Column that contains the timestamps of requests. The column must be one of the following:\n- A ``TimestampType`` column\n- A column whose values can be converted to timestamps through the pyspark\n ``to_timestamp`` [function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html).\n",
"$ref": "#/$defs/string"
}
},
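"$comment": "Illustrative example only; column names are hypothetical. Uses one of the documented granularities and problem types.",
"examples": [
  {
    "granularities": ["1 day"],
    "model_id_col": "model_id",
    "prediction_col": "prediction",
    "problem_type": "PROBLEM_TYPE_CLASSIFICATION",
    "timestamp_col": "ts"
  }
],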
"additionalProperties": false,
"required": [
"granularities",
"model_id_col",
"prediction_col",
"problem_type",
"timestamp_col"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorInferenceLogProblemType": {
"type": "string"
},
"catalog.MonitorMetric": {
"anyOf": [
{
"type": "object",
"properties": {
"definition": {
"description": "Jinja template for a SQL expression that specifies how to compute the metric. See [create metric definition](https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition).",
"$ref": "#/$defs/string"
},
"input_columns": {
"description": "A list of column names in the input table the metric should be computed for.\nCan use ``\":table\"`` to indicate that the metric needs information from multiple columns.\n",
"$ref": "#/$defs/slice/string"
},
"name": {
"description": "Name of the metric in the output tables.",
"$ref": "#/$defs/string"
},
"output_data_type": {
"description": "The output type of the custom metric.",
"$ref": "#/$defs/string"
},
"type": {
"description": "Can only be one of ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"``, ``\"CUSTOM_METRIC_TYPE_DERIVED\"``, or ``\"CUSTOM_METRIC_TYPE_DRIFT\"``.\nThe ``\"CUSTOM_METRIC_TYPE_AGGREGATE\"`` and ``\"CUSTOM_METRIC_TYPE_DERIVED\"`` metrics\nare computed on a single table, whereas the ``\"CUSTOM_METRIC_TYPE_DRIFT\"`` compare metrics across\nbaseline and input table, or across the two consecutive time windows.\n- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing columns in your table\n- CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate metrics\n- CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetricType",
"enum": [
"CUSTOM_METRIC_TYPE_AGGREGATE",
"CUSTOM_METRIC_TYPE_DERIVED",
"CUSTOM_METRIC_TYPE_DRIFT"
]
}
},
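"$comment": "Illustrative example only; the metric name, definition, and column are hypothetical. Shows an aggregate metric using the documented Jinja template style.",
"examples": [
  {
    "name": "avg_value",
    "definition": "avg(`{{input_column}}`)",
    "input_columns": ["value"],
    "output_data_type": "double",
    "type": "CUSTOM_METRIC_TYPE_AGGREGATE"
  }
],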
"additionalProperties": false,
"required": [
"definition",
"input_columns",
"name",
"output_data_type",
"type"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorMetricType": {
"type": "string"
},
"catalog.MonitorNotifications": {
"anyOf": [
{
"type": "object",
"properties": {
"on_failure": {
"description": "Who to send notifications to on monitor failure.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination"
},
"on_new_classification_tag_detected": {
"description": "Who to send notifications to when new data classification tags are detected.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorDestination"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorSnapshot": {
"anyOf": [
{
"type": "object",
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"catalog.MonitorTimeSeries": {
"anyOf": [
{
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"$ref": "#/$defs/slice/string"
},
"timestamp_col": {
"description": "Column that contains the timestamps of requests. The column must be one of the following:\n- A ``TimestampType`` column\n- A column whose values can be converted to timestamps through the pyspark\n ``to_timestamp`` [function](https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html).\n",
"$ref": "#/$defs/string"
}
},
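"$comment": "Illustrative example only; the column name is hypothetical. Uses one of the documented granularities.",
"examples": [
  {
    "granularities": ["30 minutes"],
    "timestamp_col": "event_time"
  }
],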
"additionalProperties": false,
"required": [
"granularities",
"timestamp_col"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.Adlsgen2Info": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "abfss destination, e.g. `abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e`.",
"$ref": "#/$defs/string"
}
},
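"$comment": "Illustrative example only; the container, storage account, and directory names are hypothetical.",
"examples": [
  {
    "destination": "abfss://my-container@myaccount.dfs.core.windows.net/init-scripts"
  }
],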
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.AutoScale": {
"anyOf": [
{
"type": "object",
"properties": {
"max_workers": {
"description": "The maximum number of workers to which the cluster can scale up when overloaded.\nNote that `max_workers` must be strictly greater than `min_workers`.",
"$ref": "#/$defs/int"
},
"min_workers": {
"description": "The minimum number of workers to which the cluster can scale down when underutilized.\nIt is also the initial number of workers the cluster will have after creation.",
"$ref": "#/$defs/int"
}
},
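"$comment": "Illustrative example only; the worker counts are hypothetical. Scales between one and four workers.",
"examples": [
  {
    "min_workers": 1,
    "max_workers": 4
  }
],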
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.AwsAttributes": {
"anyOf": [
{
"type": "object",
"properties": {
"availability": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAvailability"
},
"ebs_volume_count": {
"description": "The number of volumes launched for each instance. Users can choose up to 10 volumes.\nThis feature is only enabled for supported node types. Legacy node types cannot specify\ncustom EBS volumes.\nFor node types with no instance store, at least one EBS volume needs to be specified;\notherwise, cluster creation will fail.\n\nThese EBS volumes will be mounted at `/ebs0`, `/ebs1`, and etc.\nInstance store volumes will be mounted at `/local_disk0`, `/local_disk1`, and etc.\n\nIf EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for\nscratch storage because heterogenously sized scratch devices can lead to inefficient disk\nutilization. If no EBS volumes are attached, Databricks will configure Spark to use instance\nstore volumes.\n\nPlease note that if EBS volumes are specified, then the Spark configuration `spark.local.dir`\nwill be overridden.",
"$ref": "#/$defs/int"
},
"ebs_volume_iops": {
"description": "If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
"$ref": "#/$defs/int"
},
"ebs_volume_size": {
"description": "The size of each EBS volume (in GiB) launched for each instance. For general purpose\nSSD, this value must be within the range 100 - 4096. For throughput optimized HDD,\nthis value must be within the range 500 - 4096.",
"$ref": "#/$defs/int"
},
"ebs_volume_throughput": {
"description": "If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.",
"$ref": "#/$defs/int"
},
"ebs_volume_type": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.EbsVolumeType"
},
"first_on_demand": {
"description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nIf this value is greater than 0, the cluster driver node in particular will be placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
"$ref": "#/$defs/int"
},
"instance_profile_arn": {
"description": "Nodes for this cluster will only be placed on AWS instances with this instance profile. If\nommitted, nodes will be placed on instances without an IAM instance profile. The instance\nprofile must have previously been added to the Databricks environment by an account\nadministrator.\n\nThis feature may only be available to certain customer plans.\n\nIf this field is ommitted, we will pull in the default from the conf if it exists.",
"$ref": "#/$defs/string"
},
"spot_bid_price_percent": {
"description": "The bid price for AWS spot instances, as a percentage of the corresponding instance type's\non-demand price.\nFor example, if this field is set to 50, and the cluster needs a new `r3.xlarge` spot\ninstance, then the bid price is half of the price of\non-demand `r3.xlarge` instances. Similarly, if this field is set to 200, the bid price is twice\nthe price of on-demand `r3.xlarge` instances. If not specified, the default value is 100.\nWhen spot instances are requested for this cluster, only spot instances whose bid price\npercentage matches this field will be considered.\nNote that, for safety, we enforce this field to be no more than 10000.\n\nThe default value and documentation here should be kept consistent with\nCommonConf.defaultSpotBidPricePercent and CommonConf.maxSpotBidPricePercent.",
"$ref": "#/$defs/int"
},
"zone_id": {
"description": "Identifier for the availability zone/datacenter in which the cluster resides.\nThis string will be of a form like \"us-west-2a\". The provided availability\nzone must be in the same region as the Databricks deployment. For example, \"us-west-2a\"\nis not a valid zone id if the Databricks deployment resides in the \"us-east-1\" region.\nThis is an optional field at cluster creation, and if not specified, a default zone will be used.\nIf the zone specified is \"auto\", will try to place cluster in a zone with high availability,\nand will retry placement in a different AZ if there is not enough capacity.\nThe list of available zones as well as the default value can be found by using the\n`List Zones` method.",
"$ref": "#/$defs/string"
}
},
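"$comment": "Illustrative example only; the values are hypothetical. Combines a documented availability enum value with the documented \"auto\" zone placement.",
"examples": [
  {
    "availability": "SPOT_WITH_FALLBACK",
    "first_on_demand": 1,
    "zone_id": "auto"
  }
],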
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.AwsAvailability": {
"type": "string",
"description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\n\nNote: If `first_on_demand` is zero, this availability type will be used for the entire cluster.\n",
"enum": [
"SPOT",
"ON_DEMAND",
"SPOT_WITH_FALLBACK"
]
},
"compute.AzureAttributes": {
"anyOf": [
{
"type": "object",
"properties": {
"availability": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAvailability"
},
"first_on_demand": {
"description": "The first `first_on_demand` nodes of the cluster will be placed on on-demand instances.\nThis value should be greater than 0, to make sure the cluster driver node is placed on an\non-demand instance. If this value is greater than or equal to the current cluster size, all\nnodes will be placed on on-demand instances. If this value is less than the current cluster\nsize, `first_on_demand` nodes will be placed on on-demand instances and the remainder will\nbe placed on `availability` instances. Note that this value does not affect\ncluster size and cannot currently be mutated over the lifetime of a cluster.",
"$ref": "#/$defs/int"
},
"log_analytics_info": {
"description": "Defines values necessary to configure and run Azure Log Analytics agent",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo"
},
"spot_bid_max_price": {
"description": "The max bid price to be used for Azure spot instances.\nThe Max price for the bid cannot be higher than the on-demand price of the instance.\nIf not specified, the default value is -1, which specifies that the instance cannot be evicted\non the basis of price, and only on the basis of availability. Further, the value should \u003e 0 or -1.",
"$ref": "#/$defs/float64"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.AzureAvailability": {
"type": "string",
"description": "Availability type used for all subsequent nodes past the `first_on_demand` ones.\nNote: If `first_on_demand` is zero (which only happens on pool clusters), this availability\ntype will be used for the entire cluster.",
"enum": [
"SPOT_AZURE",
"ON_DEMAND_AZURE",
"SPOT_WITH_FALLBACK_AZURE"
]
},
"compute.ClientsTypes": {
"anyOf": [
{
"type": "object",
"properties": {
"jobs": {
"description": "With jobs set, the cluster can be used for jobs",
"$ref": "#/$defs/bool"
},
"notebooks": {
"description": "With notebooks set, this cluster can be used for notebooks",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.ClusterLogConf": {
"anyOf": [
{
"type": "object",
"properties": {
"dbfs": {
"description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo"
},
"s3": {
"description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo"
}
},
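"$comment": "Example adapted from the dbfs field description above.",
"examples": [
  {
    "dbfs": {
      "destination": "dbfs:/home/cluster_log"
    }
  }
],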
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.ClusterSpec": {
"anyOf": [
{
"type": "object",
"properties": {
"apply_policy_default_values": {
"description": "When set to true, fixed and default values from the policy will be used for fields that are omitted. When set to false, only fixed values from the policy will be applied.",
"$ref": "#/$defs/bool"
},
"autoscale": {
"description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AutoScale"
},
"autotermination_minutes": {
"description": "Automatically terminates the cluster after it is inactive for this time in minutes. If not set,\nthis cluster will not be automatically terminated. If specified, the threshold must be between\n10 and 10000 minutes.\nUsers can also set this value to 0 to explicitly disable automatic termination.",
"$ref": "#/$defs/int"
},
"aws_attributes": {
"description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes"
},
"azure_attributes": {
"description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes"
},
"cluster_log_conf": {
"description": "The configuration for delivering spark logs to a long-term storage destination.\nTwo kinds of destinations (dbfs and s3) are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf"
},
"cluster_name": {
"description": "Cluster name requested by the user. This doesn't have to be unique.\nIf not specified at creation, the cluster name will be an empty string.\n",
"$ref": "#/$defs/string"
},
"custom_tags": {
"description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
"$ref": "#/$defs/map/string"
},
"data_security_mode": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DataSecurityMode"
},
"docker_image": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerImage"
},
"driver_instance_pool_id": {
"description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
"$ref": "#/$defs/string"
},
"driver_node_type_id": {
"description": "The node type of the Spark driver. Note that this field is optional;\nif unset, the driver node type will be set as the same value\nas `node_type_id` defined above.\n",
"$ref": "#/$defs/string"
},
"enable_elastic_disk": {
"description": "Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk\nspace when its Spark workers are running low on disk space. This feature requires specific AWS\npermissions to function correctly - refer to the User Guide for more details.",
"$ref": "#/$defs/bool"
},
"enable_local_disk_encryption": {
"description": "Whether to enable LUKS on cluster VMs' local disks",
"$ref": "#/$defs/bool"
},
"gcp_attributes": {
"description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes"
},
"init_scripts": {
"description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo"
},
"instance_pool_id": {
"description": "The optional ID of the instance pool to which the cluster belongs.",
"$ref": "#/$defs/string"
},
"node_type_id": {
"description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.\n",
"$ref": "#/$defs/string"
},
"num_workers": {
"description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
"$ref": "#/$defs/int"
},
"policy_id": {
"description": "The ID of the cluster policy used to create the cluster if applicable.",
"$ref": "#/$defs/string"
},
"runtime_engine": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RuntimeEngine"
},
"single_user_name": {
"description": "Single user name if data_security_mode is `SINGLE_USER`",
"$ref": "#/$defs/string"
},
"spark_conf": {
"description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nUsers can also pass in a string of extra JVM options to the driver and the executors via\n`spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.\n",
"$ref": "#/$defs/map/string"
},
"spark_env_vars": {
"description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
"$ref": "#/$defs/map/string"
},
"spark_version": {
"description": "The Spark version of the cluster, e.g. `3.3.x-scala2.11`.\nA list of available Spark versions can be retrieved by using\nthe :method:clusters/sparkVersions API call.\n",
"$ref": "#/$defs/string"
},
"ssh_public_keys": {
"description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
"$ref": "#/$defs/slice/string"
},
"workload_type": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkloadType"
}
},
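"$comment": "Illustrative minimal example only; the Spark version and node type are hypothetical values, not defaults.",
"examples": [
  {
    "spark_version": "13.3.x-scala2.12",
    "node_type_id": "i3.xlarge",
    "num_workers": 2
  }
],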
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.DataSecurityMode": {
"type": "string",
"description": "Data security mode decides what data governance model to use when accessing data\nfrom a cluster.\n\n* `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode.\n* `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode.\n* `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.\n\nThe following modes are deprecated starting with Databricks Runtime 15.0 and\nwill be removed for future Databricks Runtime versions:\n\n* `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters.\n* `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters.\n* `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters.\n* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesnt have UC nor passthrough enabled.\n",
"enum": [
"NONE",
"SINGLE_USER",
"USER_ISOLATION",
"LEGACY_TABLE_ACL",
"LEGACY_PASSTHROUGH",
"LEGACY_SINGLE_USER",
"LEGACY_SINGLE_USER_STANDARD"
]
},
"compute.DbfsStorageInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "dbfs destination, e.g. `dbfs:/my/path`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.DockerBasicAuth": {
"anyOf": [
{
"type": "object",
"properties": {
"password": {
"description": "Password of the user",
"$ref": "#/$defs/string"
},
"username": {
"description": "Name of the user",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.DockerImage": {
"anyOf": [
{
"type": "object",
"properties": {
"basic_auth": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DockerBasicAuth"
},
"url": {
"description": "URL of the docker image.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.EbsVolumeType": {
"type": "string",
"description": "The type of EBS volumes that will be launched with this cluster.",
"enum": [
"GENERAL_PURPOSE_SSD",
"THROUGHPUT_OPTIMIZED_HDD"
]
},
"compute.Environment": {
"anyOf": [
{
"type": "object",
"description": "The environment entity used to preserve serverless environment side panel and jobs' environment for non-notebook task.\nIn this minimal environment spec, only pip dependencies are supported.",
"properties": {
"client": {
"description": "Client version used by the environment\nThe client is the user-facing environment of the runtime.\nEach client comes with a specific set of pre-installed libraries.\nThe version is a string, consisting of the major client version.",
"$ref": "#/$defs/string"
},
"dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
"$ref": "#/$defs/slice/string"
}
},
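"$comment": "Example adapted from the dependencies field description above; the client version string is hypothetical.",
"examples": [
  {
    "client": "1",
    "dependencies": ["foo==0.0.1", "-r /Workspace/test/requirements.txt"]
  }
],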
"additionalProperties": false,
"required": [
"client"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.GcpAttributes": {
"anyOf": [
{
"type": "object",
"properties": {
"availability": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAvailability"
},
"boot_disk_size": {
"description": "boot disk size in GB",
"$ref": "#/$defs/int"
},
"google_service_account": {
"description": "If provided, the cluster will impersonate the google service account when accessing\ngcloud services (like GCS). The google service account\nmust have previously been added to the Databricks environment by an account\nadministrator.",
"$ref": "#/$defs/string"
},
"local_ssd_count": {
"description": "If provided, each node (workers and driver) in the cluster will have this number of local SSDs attached. Each local SSD is 375GB in size. Refer to [GCP documentation](https://cloud.google.com/compute/docs/disks/local-ssd#choose_number_local_ssds) for the supported number of local SSDs for each instance type.",
"$ref": "#/$defs/int"
},
"use_preemptible_executors": {
"description": "This field determines whether the spark executors will be scheduled to run on preemptible VMs (when set to true) versus standard compute engine VMs (when set to false; default).\nNote: Soon to be deprecated, use the availability field instead.",
"$ref": "#/$defs/bool"
},
"zone_id": {
"description": "Identifier for the availability zone in which the cluster resides.\nThis can be one of the following:\n- \"HA\" =\u003e High availability, spread nodes across availability zones for a Databricks deployment region [default]\n- \"AUTO\" =\u003e Databricks picks an availability zone to schedule the cluster on.\n- A GCP availability zone =\u003e Pick One of the available zones for (machine type + region) from https://cloud.google.com/compute/docs/regions-zones.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.GcpAvailability": {
"type": "string",
"description": "This field determines whether the instance pool will contain preemptible\nVMs, on-demand VMs, or preemptible VMs with a fallback to on-demand VMs if the former is unavailable.",
"enum": [
"PREEMPTIBLE_GCP",
"ON_DEMAND_GCP",
"PREEMPTIBLE_WITH_FALLBACK_GCP"
]
},
"compute.GcsStorageInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "GCS destination/URI, e.g. `gs://my-bucket/some-prefix`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.InitScriptInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
},
"dbfs": {
"description": "destination needs to be provided. e.g.\n`{ \"dbfs\" : { \"destination\" : \"dbfs:/home/cluster_log\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.DbfsStorageInfo"
},
"file": {
"description": "destination needs to be provided. e.g.\n`{ \"file\" : { \"destination\" : \"file:/my/local/file.sh\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.LocalFileInfo"
},
"gcs": {
"description": "destination needs to be provided. e.g.\n`{ \"gcs\": { \"destination\": \"gs://my-bucket/file.sh\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcsStorageInfo"
},
"s3": {
"description": "destination and either the region or endpoint need to be provided. e.g.\n`{ \"s3\": { \"destination\" : \"s3://cluster_log_bucket/prefix\", \"region\" : \"us-west-2\" } }`\nCluster iam role is used to access s3, please make sure the cluster iam role in\n`instance_profile_arn` has permission to write data to the s3 destination.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.S3StorageInfo"
},
"volumes": {
"description": "destination needs to be provided. e.g.\n`{ \"volumes\" : { \"destination\" : \"/Volumes/my-init.sh\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.VolumesStorageInfo"
},
"workspace": {
"description": "destination needs to be provided. e.g.\n`{ \"workspace\" : { \"destination\" : \"/Users/user1@databricks.com/my-init.sh\" } }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.WorkspaceStorageInfo"
}
},
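"$comment": "Example adapted from the workspace field description above; only one destination should be set per entry.",
"examples": [
  {
    "workspace": {
      "destination": "/Users/user1@databricks.com/my-init.sh"
    }
  }
],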
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.Library": {
"anyOf": [
{
"type": "object",
"properties": {
"cran": {
"description": "Specification of a CRAN library to be installed as part of the library",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.RCranLibrary"
},
"egg": {
"description": "Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.",
"$ref": "#/$defs/string"
},
"jar": {
"description": "URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"jar\": \"/Workspace/path/to/library.jar\" }`, `{ \"jar\" : \"/Volumes/path/to/library.jar\" }` or\n`{ \"jar\": \"s3://my-bucket/library.jar\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.",
"$ref": "#/$defs/string"
},
"maven": {
"description": "Specification of a maven library to be installed. For example:\n`{ \"coordinates\": \"org.jsoup:jsoup:1.7.2\" }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary"
},
"pypi": {
"description": "Specification of a PyPi library to be installed. For example:\n`{ \"package\": \"simplejson\" }`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.PythonPyPiLibrary"
},
"requirements": {
"description": "URI of the requirements.txt file to install. Only Workspace paths and Unity Catalog Volumes paths are supported.\nFor example: `{ \"requirements\": \"/Workspace/path/to/requirements.txt\" }` or `{ \"requirements\" : \"/Volumes/path/to/requirements.txt\" }`",
"$ref": "#/$defs/string"
},
"whl": {
"description": "URI of the wheel library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.\nFor example: `{ \"whl\": \"/Workspace/path/to/library.whl\" }`, `{ \"whl\" : \"/Volumes/path/to/library.whl\" }` or\n`{ \"whl\": \"s3://my-bucket/library.whl\" }`.\nIf S3 is used, please make sure the cluster has read access on the library. You may need to\nlaunch the cluster with an IAM role to access the S3 URI.",
"$ref": "#/$defs/string"
}
},
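"$comment": "Example adapted from the pypi field description above.",
"examples": [
  {
    "pypi": {
      "package": "simplejson"
    }
  }
],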
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.LocalFileInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "local file destination, e.g. `file:/my/local/file.sh`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.LogAnalyticsInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
},
"log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.MavenLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"coordinates": {
"description": "Gradle-style maven coordinates. For example: \"org.jsoup:jsoup:1.7.2\".",
"$ref": "#/$defs/string"
},
"exclusions": {
"description": "List of dependences to exclude. For example: `[\"slf4j:slf4j\", \"*:hadoop-client\"]`.\n\nMaven dependency exclusions:\nhttps://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html.",
"$ref": "#/$defs/slice/string"
},
"repo": {
"description": "Maven repo to install the Maven package from. If omitted, both Maven Central Repository\nand Spark Packages are searched.",
"$ref": "#/$defs/string"
}
},
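"$comment": "Example adapted from the coordinates field description above.",
"examples": [
  {
    "coordinates": "org.jsoup:jsoup:1.7.2"
  }
],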
"additionalProperties": false,
"required": [
"coordinates"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.PythonPyPiLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"package": {
"description": "The name of the pypi package to install. An optional exact version specification is also\nsupported. Examples: \"simplejson\" and \"simplejson==3.8.0\".",
"$ref": "#/$defs/string"
},
"repo": {
"description": "The repository where the package can be found. If not specified, the default pip index is\nused.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"package"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.RCranLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"package": {
"description": "The name of the CRAN package to install.",
"$ref": "#/$defs/string"
},
"repo": {
"description": "The repository where the package can be found. If not specified, the default CRAN repo is used.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"package"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.RuntimeEngine": {
"type": "string",
"description": "Determines the cluster's runtime engine, either standard or Photon.\n\nThis field is not compatible with legacy `spark_version` values that contain `-photon-`.\nRemove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.\n\nIf left unspecified, the runtime engine defaults to standard unless the spark_version\ncontains -photon-, in which case Photon will be used.\n",
"enum": [
"NULL",
"STANDARD",
"PHOTON"
]
},
"compute.S3StorageInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"canned_acl": {
"description": "(Optional) Set canned access control list for the logs, e.g. `bucket-owner-full-control`.\nIf `canned_cal` is set, please make sure the cluster iam role has `s3:PutObjectAcl` permission on\nthe destination bucket and prefix. The full list of possible canned acl can be found at\nhttp://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl.\nPlease also note that by default only the object owner gets full controls. If you are using cross account\nrole for writing data, you may want to set `bucket-owner-full-control` to make bucket owner able to\nread the logs.",
"$ref": "#/$defs/string"
},
"destination": {
"description": "S3 destination, e.g. `s3://my-bucket/some-prefix` Note that logs will be delivered using\ncluster iam role, please make sure you set cluster iam role and the role has write access to the\ndestination. Please also note that you cannot use AWS keys to deliver logs.",
"$ref": "#/$defs/string"
},
"enable_encryption": {
"description": "(Optional) Flag to enable server side encryption, `false` by default.",
"$ref": "#/$defs/bool"
},
"encryption_type": {
"description": "(Optional) The encryption type, it could be `sse-s3` or `sse-kms`. It will be used only when\nencryption is enabled and the default type is `sse-s3`.",
"$ref": "#/$defs/string"
},
"endpoint": {
"description": "S3 endpoint, e.g. `https://s3-us-west-2.amazonaws.com`. Either region or endpoint needs to be set.\nIf both are set, endpoint will be used.",
"$ref": "#/$defs/string"
},
"kms_key": {
"description": "(Optional) Kms key which will be used if encryption is enabled and encryption type is set to `sse-kms`.",
"$ref": "#/$defs/string"
},
"region": {
"description": "S3 region, e.g. `us-west-2`. Either region or endpoint needs to be set. If both are set,\nendpoint will be used.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.VolumesStorageInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.WorkloadType": {
"anyOf": [
{
"type": "object",
"properties": {
"clients": {
"description": " defined what type of clients can use the cluster. E.g. Notebooks, Jobs",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClientsTypes"
}
},
"additionalProperties": false,
"required": [
"clients"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.WorkspaceStorageInfo": {
"anyOf": [
{
"type": "object",
"properties": {
"destination": {
"description": "workspace files destination, e.g. `/Users/user1@databricks.com/my-init.sh`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"destination"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Condition": {
"type": "string",
"enum": [
"ANY_UPDATED",
"ALL_UPDATED"
]
},
"jobs.ConditionTask": {
"anyOf": [
{
"type": "object",
"properties": {
"left": {
"description": "The left operand of the condition task. Can be either a string value or a job state or parameter reference.",
"$ref": "#/$defs/string"
},
"op": {
"description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTaskOp"
},
"right": {
"description": "The right operand of the condition task. Can be either a string value or a job state or parameter reference.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"left",
"op",
"right"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.ConditionTaskOp": {
"type": "string",
"description": "* `EQUAL_TO`, `NOT_EQUAL` operators perform string comparison of their operands. This means that `“12.0” == “12”` will evaluate to `false`.\n* `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL` operators perform numeric comparison of their operands. `“12.0” \u003e= “12”` will evaluate to `true`, `“10.0” \u003e= “12”` will evaluate to `false`.\n\nThe boolean comparison to task values can be implemented with operators `EQUAL_TO`, `NOT_EQUAL`. If a task value was set to a boolean value, it will be serialized to `“true”` or `“false”` for the comparison.",
"enum": [
"EQUAL_TO",
"GREATER_THAN",
"GREATER_THAN_OR_EQUAL",
"LESS_THAN",
"LESS_THAN_OR_EQUAL",
"NOT_EQUAL"
]
},
"jobs.Continuous": {
"anyOf": [
{
"type": "object",
"properties": {
"pause_status": {
"description": "Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.CronSchedule": {
"anyOf": [
{
"type": "object",
"properties": {
"pause_status": {
"description": "Indicate whether this schedule is paused or not.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus"
},
"quartz_cron_expression": {
"description": "A Cron expression using Quartz syntax that describes the schedule for a job. See [Cron Trigger](http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html) for details. This field is required.",
"$ref": "#/$defs/string"
},
"timezone_id": {
"description": "A Java timezone ID. The schedule for a job is resolved with respect to this timezone. See [Java TimeZone](https://docs.oracle.com/javase/7/docs/api/java/util/TimeZone.html) for details. This field is required.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"quartz_cron_expression",
"timezone_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.DbtTask": {
"anyOf": [
{
"type": "object",
"properties": {
"catalog": {
"description": "Optional name of the catalog to use. The value is the top level in the 3-level namespace of Unity Catalog (catalog / schema / relation). The catalog value can only be specified if a warehouse_id is specified. Requires dbt-databricks \u003e= 1.1.1.",
"$ref": "#/$defs/string"
},
"commands": {
"description": "A list of dbt commands to execute. All commands must start with `dbt`. This parameter must not be empty. A maximum of up to 10 commands can be provided.",
"$ref": "#/$defs/slice/string"
},
"profiles_directory": {
"description": "Optional (relative) path to the profiles directory. Can only be specified if no warehouse_id is specified. If no warehouse_id is specified and this folder is unset, the root directory is used.",
"$ref": "#/$defs/string"
},
"project_directory": {
"description": "Path to the project directory. Optional for Git sourced tasks, in which\ncase if no value is provided, the root of the Git repository is used.",
"$ref": "#/$defs/string"
},
"schema": {
"description": "Optional schema to write to. This parameter is only used when a warehouse_id is also provided. If not provided, the `default` schema is used.",
"$ref": "#/$defs/string"
},
"source": {
"description": "Optional location type of the project directory. When set to `WORKSPACE`, the project will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the project will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: Project is located in Databricks workspace.\n* `GIT`: Project is located in cloud Git provider.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source"
},
"warehouse_id": {
"description": "ID of the SQL warehouse to connect to. If provided, we automatically generate and provide the profile and connection details to dbt. It can be overridden on a per-command basis by using the `--profiles-dir` command line argument.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"commands"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.FileArrivalTriggerConfiguration": {
"anyOf": [
{
"type": "object",
"properties": {
"min_time_between_triggers_seconds": {
"description": "If set, the trigger starts a run only after the specified amount of time passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds",
"$ref": "#/$defs/int"
},
"url": {
"description": "URL to be monitored for file arrivals. The path must point to the root or a subpath of the external location.",
"$ref": "#/$defs/string"
},
"wait_after_last_change_seconds": {
"description": "If set, the trigger starts a run only after no file activity has occurred for the specified amount of time.\nThis makes it possible to wait for a batch of incoming files to arrive before triggering a run. The\nminimum allowed value is 60 seconds.",
"$ref": "#/$defs/int"
}
},
"additionalProperties": false,
"required": [
"url"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.ForEachTask": {
"anyOf": [
{
"type": "object",
"properties": {
"concurrency": {
"description": "An optional maximum allowed number of concurrent runs of the task.\nSet this value if you want to be able to execute multiple runs of the task concurrently.",
"$ref": "#/$defs/int"
},
"inputs": {
"description": "Array for task to iterate on. This can be a JSON string or a reference to\nan array parameter.",
"$ref": "#/$defs/string"
},
"task": {
"description": "Configuration for the task that will be run for each element in the array",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task"
}
},
"additionalProperties": false,
"required": [
"inputs",
"task"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Format": {
"type": "string",
"enum": [
"SINGLE_TASK",
"MULTI_TASK"
]
},
"jobs.GitProvider": {
"type": "string",
"enum": [
"gitHub",
"bitbucketCloud",
"azureDevOpsServices",
"gitHubEnterprise",
"bitbucketServer",
"gitLab",
"gitLabEnterpriseEdition",
"awsCodeCommit"
]
},
"jobs.GitSnapshot": {
"anyOf": [
{
"type": "object",
"description": "Read-only state of the remote repository at the time the job was run. This field is only included on job runs.",
"properties": {
"used_commit": {
"description": "Commit that was used to execute the run. If git_branch was specified, this points to the HEAD of the branch at the time of the run; if git_tag was specified, this points to the commit the tag points to.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.GitSource": {
"anyOf": [
{
"type": "object",
"description": "An optional specification for a remote Git repository containing the source code used by tasks. Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.\n\nIf `git_source` is set, these tasks retrieve the file from the remote repository by default. However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.\n\nNote: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.",
"properties": {
"git_branch": {
"description": "Name of the branch to be checked out and used by this job. This field cannot be specified in conjunction with git_tag or git_commit.",
"$ref": "#/$defs/string"
},
"git_commit": {
"description": "Commit to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_tag.",
"$ref": "#/$defs/string"
},
"git_provider": {
"description": "Unique identifier of the service used to host the Git repository. The value is case insensitive.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.GitProvider"
},
"git_tag": {
"description": "Name of the tag to be checked out and used by this job. This field cannot be specified in conjunction with git_branch or git_commit.",
"$ref": "#/$defs/string"
},
"git_url": {
"description": "URL of the repository to be cloned by this job.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"git_provider",
"git_url"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobCluster": {
"anyOf": [
{
"type": "object",
"properties": {
"job_cluster_key": {
"description": "A unique name for the job cluster. This field is required and must be unique within the job.\n`JobTaskSettings` may refer to this field to determine which cluster to launch for the task execution.",
"$ref": "#/$defs/string"
},
"new_cluster": {
"description": "If new_cluster, a description of a cluster that is created for each task.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec"
}
},
"additionalProperties": false,
"required": [
"job_cluster_key",
"new_cluster"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobDeployment": {
"anyOf": [
{
"type": "object",
"properties": {
"kind": {
"description": "The kind of deployment that manages the job.\n\n* `BUNDLE`: The job is managed by Databricks Asset Bundle.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobDeploymentKind"
},
"metadata_file_path": {
"description": "Path of the file that contains deployment metadata.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"kind"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobDeploymentKind": {
"type": "string",
"description": "* `BUNDLE`: The job is managed by Databricks Asset Bundle.",
"enum": [
"BUNDLE"
]
},
"jobs.JobEditMode": {
"type": "string",
"description": "Edit mode of the job.\n\n* `UI_LOCKED`: The job is in a locked UI state and cannot be modified.\n* `EDITABLE`: The job is in an editable state and can be modified.",
"enum": [
"UI_LOCKED",
"EDITABLE"
]
},
"jobs.JobEmailNotifications": {
"anyOf": [
{
"type": "object",
"properties": {
"no_alert_for_skipped_runs": {
"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.",
"$ref": "#/$defs/bool"
},
"on_duration_warning_threshold_exceeded": {
"description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_failure": {
"description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_start": {
"description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"$ref": "#/$defs/slice/string"
},
"on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobEnvironment": {
"anyOf": [
{
"type": "object",
"properties": {
"environment_key": {
"description": "The key of an environment. It has to be unique within a job.",
"$ref": "#/$defs/string"
},
"spec": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Environment"
}
},
"additionalProperties": false,
"required": [
"environment_key"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobNotificationSettings": {
"anyOf": [
{
"type": "object",
"properties": {
"no_alert_for_canceled_runs": {
"description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.",
"$ref": "#/$defs/bool"
},
"no_alert_for_skipped_runs": {
"description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobParameterDefinition": {
"anyOf": [
{
"type": "object",
"properties": {
"default": {
"description": "Default value of the parameter.",
"$ref": "#/$defs/string"
},
"name": {
"description": "The name of the defined parameter. May only contain alphanumeric characters, `_`, `-`, and `.`",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"default",
"name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobRunAs": {
"anyOf": [
{
"type": "object",
"description": "Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If not specified, the job/pipeline runs as the user who created the job/pipeline.\n\nExactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an error is thrown.",
"properties": {
"service_principal_name": {
"description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.",
"$ref": "#/$defs/string"
},
"user_name": {
"description": "The email of an active workspace user. Non-admin users can only set this field to their own email.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobSource": {
"anyOf": [
{
"type": "object",
"description": "The source of the job specification in the remote repository when the job is source controlled.",
"properties": {
"dirty_state": {
"description": "Dirty state indicates the job is not fully synced with the job specification in the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobSourceDirtyState"
},
"import_from_git_branch": {
"description": "Name of the branch which the job is imported from.",
"$ref": "#/$defs/string"
},
"job_config_path": {
"description": "Path of the job YAML file that contains the job specification.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"import_from_git_branch",
"job_config_path"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobSourceDirtyState": {
"type": "string",
"description": "Dirty state indicates the job is not fully synced with the job specification\nin the remote repository.\n\nPossible values are:\n* `NOT_SYNCED`: The job is not yet synced with the remote job specification. Import the remote job specification from UI to make the job fully synced.\n* `DISCONNECTED`: The job is temporary disconnected from the remote job specification and is allowed for live edit. Import the remote job specification again from UI to make the job fully synced.",
"enum": [
"NOT_SYNCED",
"DISCONNECTED"
]
},
"jobs.JobsHealthMetric": {
"type": "string",
"description": "Specifies the health metric that is being evaluated for a particular health rule.\n\n* `RUN_DURATION_SECONDS`: Expected total time for a run in seconds.\n* `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Private Preview.\n* `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Private Preview.",
"enum": [
"RUN_DURATION_SECONDS",
"STREAMING_BACKLOG_BYTES",
"STREAMING_BACKLOG_RECORDS",
"STREAMING_BACKLOG_SECONDS",
"STREAMING_BACKLOG_FILES"
]
},
"jobs.JobsHealthOperator": {
"type": "string",
"description": "Specifies the operator used to compare the health metric value with the specified threshold.",
"enum": [
"GREATER_THAN"
]
},
"jobs.JobsHealthRule": {
"anyOf": [
{
"type": "object",
"properties": {
"metric": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthMetric"
},
"op": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthOperator"
},
"value": {
"description": "Specifies the threshold value that the health metric should obey to satisfy the health rule.",
"$ref": "#/$defs/int64"
}
},
"additionalProperties": false,
"required": [
"metric",
"op",
"value"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobsHealthRules": {
"anyOf": [
{
"type": "object",
"description": "An optional set of health rules that can be defined for this job.",
"properties": {
"rules": {
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.NotebookTask": {
"anyOf": [
{
"type": "object",
"properties": {
"base_parameters": {
"description": "Base parameters to be used for each run of this job. If the run is initiated by a call to :method:jobs/run\nNow with parameters specified, the two parameters maps are merged. If the same key is specified in\n`base_parameters` and in `run-now`, the value from `run-now` is used.\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nIf the notebook takes a parameter that is not specified in the jobs `base_parameters` or the `run-now` override parameters,\nthe default value from the notebook is used.\n\nRetrieve these parameters in a notebook using [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-widgets).\n\nThe JSON representation of this field cannot exceed 1MB.",
"$ref": "#/$defs/map/string"
},
"notebook_path": {
"description": "The path of the notebook to be run in the Databricks workspace or remote repository.\nFor notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash.\nFor notebooks stored in a remote repository, the path must be relative. This field is required.",
"$ref": "#/$defs/string"
},
"source": {
"description": "Optional location type of the notebook. When set to `WORKSPACE`, the notebook will be retrieved from the local Databricks workspace. When set to `GIT`, the notebook will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n* `WORKSPACE`: Notebook is located in Databricks workspace.\n* `GIT`: Notebook is located in cloud Git provider.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source"
},
"warehouse_id": {
"description": "Optional `warehouse_id` to run the notebook on a SQL warehouse. Classic SQL warehouses are NOT supported, please use serverless or pro SQL warehouses.\n\nNote that SQL warehouses only support SQL cells; if the notebook contains non-SQL cells, the run will fail.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"notebook_path"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.PauseStatus": {
"type": "string",
"enum": [
"UNPAUSED",
"PAUSED"
]
},
"jobs.PeriodicTriggerConfiguration": {
"anyOf": [
{
"type": "object",
"properties": {
"interval": {
"description": "The interval at which the trigger should run.",
"$ref": "#/$defs/int"
},
"unit": {
"description": "The unit of time for the interval.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfigurationTimeUnit"
}
},
"additionalProperties": false,
"required": [
"interval",
"unit"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.PeriodicTriggerConfigurationTimeUnit": {
"type": "string",
"enum": [
"HOURS",
"DAYS",
"WEEKS"
]
},
"jobs.PipelineParams": {
"anyOf": [
{
"type": "object",
"properties": {
"full_refresh": {
"description": "If true, triggers a full refresh on the delta live table.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.PipelineTask": {
"anyOf": [
{
"type": "object",
"properties": {
"full_refresh": {
"description": "If true, triggers a full refresh on the delta live table.",
"$ref": "#/$defs/bool"
},
"pipeline_id": {
"description": "The full name of the pipeline task to execute.",
"$ref": "#/$defs/string"
}
},
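"examples": [
  {
    "pipeline_id": "a1b2c3d4-0000-1111-2222-333344445555",
    "full_refresh": false
  }
],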
"additionalProperties": false,
"required": [
"pipeline_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.PythonWheelTask": {
"anyOf": [
{
"type": "object",
"properties": {
"entry_point": {
"description": "Named entry point to use, if it does not exist in the metadata of the package it executes the function from the package directly using `$packageName.$entryPoint()`",
"$ref": "#/$defs/string"
},
"named_parameters": {
"description": "Command-line parameters passed to Python wheel task in the form of `[\"--name=task\", \"--data=dbfs:/path/to/data.json\"]`. Leave it empty if `parameters` is not null.",
"$ref": "#/$defs/map/string"
},
"package_name": {
"description": "Name of the package to execute",
"$ref": "#/$defs/string"
},
"parameters": {
"description": "Command-line parameters passed to Python wheel task. Leave it empty if `named_parameters` is not null.",
"$ref": "#/$defs/slice/string"
}
},
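"examples": [
  {
    "package_name": "my_package",
    "entry_point": "main",
    "parameters": [
      "--env=dev"
    ]
  }
],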
"additionalProperties": false,
"required": [
"entry_point",
"package_name"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.QueueSettings": {
"anyOf": [
{
"type": "object",
"properties": {
"enabled": {
"description": "If true, enable queueing for the job. This is a required field.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false,
"required": [
"enabled"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.RunIf": {
"type": "string",
"description": "An optional value indicating the condition that determines whether the task should be run once its dependencies have been completed. When omitted, defaults to `ALL_SUCCESS`.\n\nPossible values are:\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: ALl dependencies have failed",
"enum": [
"ALL_SUCCESS",
"ALL_DONE",
"NONE_FAILED",
"AT_LEAST_ONE_SUCCESS",
"ALL_FAILED",
"AT_LEAST_ONE_FAILED"
]
},
"jobs.RunJobTask": {
"anyOf": [
{
"type": "object",
"properties": {
"dbt_commands": {
"description": "An array of commands to execute for jobs with the dbt task, for example `\"dbt_commands\": [\"dbt deps\", \"dbt seed\", \"dbt deps\", \"dbt seed\", \"dbt run\"]`",
"$ref": "#/$defs/slice/string"
},
"jar_params": {
"description": "A list of parameters for jobs with Spark JAR tasks, for example `\"jar_params\": [\"john doe\", \"35\"]`.\nThe parameters are used to invoke the main function of the main class specified in the Spark JAR task.\nIf not specified upon `run-now`, it defaults to an empty list.\njar_params cannot be specified in conjunction with notebook_params.\nThe JSON representation of this field (for example `{\"jar_params\":[\"john doe\",\"35\"]}`) cannot exceed 10,000 bytes.\n\nUse [Task parameter variables](/jobs.html\\\"#parameter-variables\\\") to set parameters containing information about job runs.",
"$ref": "#/$defs/slice/string"
},
"job_id": {
"description": "ID of the job to trigger.",
"$ref": "#/$defs/int64"
},
"job_parameters": {
"description": "Job-level parameters used to trigger the job.",
"$ref": "#/$defs/map/string"
},
"notebook_params": {
"description": "A map from keys to values for jobs with notebook task, for example `\"notebook_params\": {\"name\": \"john doe\", \"age\": \"35\"}`.\nThe map is passed to the notebook and is accessible through the [dbutils.widgets.get](https://docs.databricks.com/dev-tools/databricks-utils.html) function.\n\nIf not specified upon `run-now`, the triggered run uses the jobs base parameters.\n\nnotebook_params cannot be specified in conjunction with jar_params.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nThe JSON representation of this field (for example `{\"notebook_params\":{\"name\":\"john doe\",\"age\":\"35\"}}`) cannot exceed 10,000 bytes.",
"$ref": "#/$defs/map/string"
},
"pipeline_params": {
"description": "Controls whether the pipeline should perform a full refresh",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams"
},
"python_named_params": {
"$ref": "#/$defs/map/string"
},
"python_params": {
"description": "A list of parameters for jobs with Python tasks, for example `\"python_params\": [\"john doe\", \"35\"]`.\nThe parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it would overwrite\nthe parameters specified in job setting. The JSON representation of this field (for example `{\"python_params\":[\"john doe\",\"35\"]}`)\ncannot exceed 10,000 bytes.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.\n\nImportant\n\nThese parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error.\nExamples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis.",
"$ref": "#/$defs/slice/string"
},
"spark_submit_params": {
"description": "A list of parameters for jobs with spark submit task, for example `\"spark_submit_params\": [\"--class\", \"org.apache.spark.examples.SparkPi\"]`.\nThe parameters are passed to spark-submit script as command-line parameters. If specified upon `run-now`, it would overwrite the\nparameters specified in job setting. The JSON representation of this field (for example `{\"python_params\":[\"john doe\",\"35\"]}`)\ncannot exceed 10,000 bytes.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs\n\nImportant\n\nThese parameters accept only Latin characters (ASCII character set). Using non-ASCII characters returns an error.\nExamples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and emojis.",
"$ref": "#/$defs/slice/string"
},
"sql_params": {
"description": "A map from keys to values for jobs with SQL task, for example `\"sql_params\": {\"name\": \"john doe\", \"age\": \"35\"}`. The SQL alert task does not support custom parameters.",
"$ref": "#/$defs/map/string"
}
},
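"examples": [
  {
    "job_id": 123456789,
    "job_parameters": {
      "env": "dev"
    }
  }
],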
"additionalProperties": false,
"required": [
"job_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Source": {
"type": "string",
"description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\\\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.",
"enum": [
"WORKSPACE",
"GIT"
]
},
"jobs.SparkJarTask": {
"anyOf": [
{
"type": "object",
"properties": {
"jar_uri": {
"description": "Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.",
"$ref": "#/$defs/string"
},
"main_class_name": {
"description": "The full name of the class containing the main method to be executed. This class must be contained in a JAR provided as a library.\n\nThe code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.",
"$ref": "#/$defs/string"
},
"parameters": {
"description": "Parameters passed to the main method.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
"$ref": "#/$defs/slice/string"
}
},
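"examples": [
  {
    "main_class_name": "org.apache.spark.examples.SparkPi",
    "parameters": [
      "10"
    ]
  }
],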
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SparkPythonTask": {
"anyOf": [
{
"type": "object",
"properties": {
"parameters": {
"description": "Command line parameters passed to the Python file.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
"$ref": "#/$defs/slice/string"
},
"python_file": {
"description": "The Python file to be executed. Cloud file URIs (such as dbfs:/, s3:/, adls:/, gcs:/) and workspace paths are supported. For python files stored in the Databricks workspace, the path must be absolute and begin with `/`. For files stored in a remote repository, the path must be relative. This field is required.",
"$ref": "#/$defs/string"
},
"source": {
"description": "Optional location type of the Python file. When set to `WORKSPACE` or not specified, the file will be retrieved from the local\nDatabricks workspace or cloud location (if the `python_file` has a URI format). When set to `GIT`,\nthe Python file will be retrieved from a Git repository defined in `git_source`.\n\n* `WORKSPACE`: The Python file is located in a Databricks workspace or at a cloud filesystem URI.\n* `GIT`: The Python file is located in a remote Git repository.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source"
}
},
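"examples": [
  {
    "python_file": "/Workspace/Users/user@example.com/my_script.py",
    "parameters": [
      "--env=dev"
    ]
  }
],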
"additionalProperties": false,
"required": [
"python_file"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SparkSubmitTask": {
"anyOf": [
{
"type": "object",
"properties": {
"parameters": {
"description": "Command-line parameters passed to spark submit.\n\nUse [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTask": {
"anyOf": [
{
"type": "object",
"properties": {
"alert": {
"description": "If alert, indicates that this job must refresh a SQL alert.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskAlert"
},
"dashboard": {
"description": "If dashboard, indicates that this job must refresh a SQL dashboard.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskDashboard"
},
"file": {
"description": "If file, indicates that this job runs a SQL file in a remote Git repository.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskFile"
},
"parameters": {
"description": "Parameters to be used for each run of this job. The SQL alert task does not support custom parameters.",
"$ref": "#/$defs/map/string"
},
"query": {
"description": "If query, indicates that this job must execute a SQL query.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskQuery"
},
"warehouse_id": {
"description": "The canonical identifier of the SQL warehouse. Recommended to use with serverless or pro SQL warehouses. Classic SQL warehouses are only supported for SQL alert, dashboard and query tasks and are limited to scheduled single-task jobs.",
"$ref": "#/$defs/string"
}
},
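"examples": [
  {
    "warehouse_id": "abcdef1234567890",
    "query": {
      "query_id": "a1b2c3d4-0000-1111-2222-333344445555"
    }
  }
],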
"additionalProperties": false,
"required": [
"warehouse_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskAlert": {
"anyOf": [
{
"type": "object",
"properties": {
"alert_id": {
"description": "The canonical identifier of the SQL alert.",
"$ref": "#/$defs/string"
},
"pause_subscriptions": {
"description": "If true, the alert notifications are not sent to subscribers.",
"$ref": "#/$defs/bool"
},
"subscriptions": {
"description": "If specified, alert notifications are sent to subscribers.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription"
}
},
"additionalProperties": false,
"required": [
"alert_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskDashboard": {
"anyOf": [
{
"type": "object",
"properties": {
"custom_subject": {
"description": "Subject of the email sent to subscribers of this task.",
"$ref": "#/$defs/string"
},
"dashboard_id": {
"description": "The canonical identifier of the SQL dashboard.",
"$ref": "#/$defs/string"
},
"pause_subscriptions": {
"description": "If true, the dashboard snapshot is not taken, and emails are not sent to subscribers.",
"$ref": "#/$defs/bool"
},
"subscriptions": {
"description": "If specified, dashboard snapshots are sent to subscriptions.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription"
}
},
"additionalProperties": false,
"required": [
"dashboard_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskFile": {
"anyOf": [
{
"type": "object",
"properties": {
"path": {
"description": "Path of the SQL file. Must be relative if the source is a remote Git repository and absolute for workspace paths.",
"$ref": "#/$defs/string"
},
"source": {
"description": "Optional location type of the SQL file. When set to `WORKSPACE`, the SQL file will be retrieved\nfrom the local Databricks workspace. When set to `GIT`, the SQL file will be retrieved from a Git repository\ndefined in `git_source`. If the value is empty, the task will use `GIT` if `git_source` is defined and `WORKSPACE` otherwise.\n\n* `WORKSPACE`: SQL file is located in Databricks workspace.\n* `GIT`: SQL file is located in cloud Git provider.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Source"
}
},
"additionalProperties": false,
"required": [
"path"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskQuery": {
"anyOf": [
{
"type": "object",
"properties": {
"query_id": {
"description": "The canonical identifier of the SQL query.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"query_id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskSubscription": {
"anyOf": [
{
"type": "object",
"properties": {
"destination_id": {
"description": "The canonical identifier of the destination to receive email notification. This parameter is mutually exclusive with user_name. You cannot set both destination_id and user_name for subscription notifications.",
"$ref": "#/$defs/string"
},
"user_name": {
"description": "The user name to receive the subscription email. This parameter is mutually exclusive with destination_id. You cannot set both destination_id and user_name for subscription notifications.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.TableUpdateTriggerConfiguration": {
"anyOf": [
{
"type": "object",
"properties": {
"condition": {
"description": "The table(s) condition based on which to trigger a job run.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Condition"
},
"min_time_between_triggers_seconds": {
"description": "If set, the trigger starts a run only after the specified amount of time has passed since\nthe last time the trigger fired. The minimum allowed value is 60 seconds.",
"$ref": "#/$defs/int"
},
"table_names": {
"description": "A list of Delta tables to monitor for changes. The table name must be in the format `catalog_name.schema_name.table_name`.",
"$ref": "#/$defs/slice/string"
},
"wait_after_last_change_seconds": {
"description": "If set, the trigger starts a run only after no table updates have occurred for the specified time\nand can be used to wait for a series of table updates before triggering a run. The\nminimum allowed value is 60 seconds.",
"$ref": "#/$defs/int"
}
},
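"examples": [
  {
    "table_names": [
      "main.default.my_table"
    ],
    "condition": "ANY_UPDATED",
    "min_time_between_triggers_seconds": 60
  }
],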
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Task": {
"anyOf": [
{
"type": "object",
"properties": {
"condition_task": {
"description": "If condition_task, specifies a condition with an outcome that can be used to control the execution of other tasks. Does not require a cluster to execute and does not support retries or notifications.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ConditionTask"
},
"dbt_task": {
"description": "If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.DbtTask"
},
"depends_on": {
"description": "An optional array of objects specifying the dependency graph of the task. All tasks specified in this field must complete before executing this task. The task will run only if the `run_if` condition is true.\nThe key is `task_key`, and the value is the name assigned to the dependent task.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency"
},
"description": {
"description": "An optional description for this task.",
"$ref": "#/$defs/string"
},
"disable_auto_optimization": {
"description": "An option to disable auto optimization in serverless",
"$ref": "#/$defs/bool"
},
"email_notifications": {
"description": "An optional set of email addresses that is notified when runs of this task begin or complete as well as when this task is deleted. The default behavior is to not send any emails.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskEmailNotifications"
},
"environment_key": {
"description": "The key that references an environment spec in a job. This field is required for Python script, Python wheel and dbt tasks when using serverless compute.",
"$ref": "#/$defs/string"
},
"existing_cluster_id": {
"description": "If existing_cluster_id, the ID of an existing cluster that is used for all runs.\nWhen running jobs or tasks on an existing cluster, you may need to manually restart\nthe cluster if it stops responding. We suggest running jobs and tasks on new clusters for\ngreater reliability",
"$ref": "#/$defs/string"
},
"for_each_task": {
"description": "If for_each_task, indicates that this task must execute the nested task within it.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.ForEachTask"
},
"health": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRules"
},
"job_cluster_key": {
"description": "If job_cluster_key, this task is executed reusing the cluster specified in `job.settings.job_clusters`.",
"$ref": "#/$defs/string"
},
"libraries": {
"description": "An optional list of libraries to be installed on the cluster.\nThe default value is an empty list.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.Library"
},
"max_retries": {
"description": "An optional maximum number of times to retry an unsuccessful run. A run is considered to be unsuccessful if it completes with the `FAILED` result_state or `INTERNAL_ERROR` `life_cycle_state`. The value `-1` means to retry indefinitely and the value `0` means to never retry.",
"$ref": "#/$defs/int"
},
"min_retry_interval_millis": {
"description": "An optional minimal interval in milliseconds between the start of the failed run and the subsequent retry run. The default behavior is that unsuccessful runs are immediately retried.",
"$ref": "#/$defs/int"
},
"new_cluster": {
"description": "If new_cluster, a description of a new cluster that is created for each run.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterSpec"
},
"notebook_task": {
"description": "If notebook_task, indicates that this task must run a notebook. This field may not be specified in conjunction with spark_jar_task.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.NotebookTask"
},
"notification_settings": {
"description": "Optional notification settings that are used when sending notifications to each of the `email_notifications` and `webhook_notifications` for this task.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskNotificationSettings"
},
"pipeline_task": {
"description": "If pipeline_task, indicates that this task must execute a Pipeline.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineTask"
},
"python_wheel_task": {
"description": "If python_wheel_task, indicates that this job must execute a PythonWheel.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PythonWheelTask"
},
"retry_on_timeout": {
"description": "An optional policy to specify whether to retry a job when it times out. The default behavior\nis to not retry on timeout.",
"$ref": "#/$defs/bool"
},
"run_if": {
"description": "An optional value specifying the condition determining whether the task is run once its dependencies have been completed.\n\n* `ALL_SUCCESS`: All dependencies have executed and succeeded\n* `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded\n* `NONE_FAILED`: None of the dependencies have failed and at least one was executed\n* `ALL_DONE`: All dependencies have been completed\n* `AT_LEAST_ONE_FAILED`: At least one dependency failed\n* `ALL_FAILED`: ALl dependencies have failed",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunIf"
},
"run_job_task": {
"description": "If run_job_task, indicates that this task must execute another job.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.RunJobTask"
},
"spark_jar_task": {
"description": "If spark_jar_task, indicates that this task must run a JAR.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkJarTask"
},
"spark_python_task": {
"description": "If spark_python_task, indicates that this task must run a Python file.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkPythonTask"
},
"spark_submit_task": {
"description": "If `spark_submit_task`, indicates that this task must be launched by the spark submit script. This task can run only on new clusters.\n\nIn the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark configurations.\n\n`master`, `deploy-mode`, and `executor-cores` are automatically configured by Databricks; you _cannot_ specify them in parameters.\n\nBy default, the Spark submit job uses all available memory (excluding reserved memory for Databricks services). You can set `--driver-memory`, and `--executor-memory` to a smaller value to leave some room for off-heap usage.\n\nThe `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SparkSubmitTask"
},
"sql_task": {
"description": "If sql_task, indicates that this job must execute a SQL task.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTask"
},
"task_key": {
"description": "A unique name for the task. This field is used to refer to this task from other tasks.\nThis field is required and must be unique within its parent job.\nOn Update or Reset, this field is used to reference the tasks to be updated or reset.",
"$ref": "#/$defs/string"
},
"timeout_seconds": {
"description": "An optional timeout applied to each run of this job task. A value of `0` means no timeout.",
"$ref": "#/$defs/int"
},
"webhook_notifications": {
"description": "A collection of system notification IDs to notify when runs of this task begin or complete. The default behavior is to not send any system notifications.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.WebhookNotifications"
}
},
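"examples": [
  {
    "task_key": "my_notebook_task",
    "job_cluster_key": "default_cluster",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/user@example.com/my_notebook"
    }
  }
],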
"additionalProperties": false,
"required": [
"task_key"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.TaskDependency": {
"anyOf": [
{
"type": "object",
"properties": {
"outcome": {
"description": "Can only be specified on condition task dependencies. The outcome of the dependent task that must be met for this task to run.",
"$ref": "#/$defs/string"
},
"task_key": {
"description": "The name of the task this task depends on.",
"$ref": "#/$defs/string"
}
},
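"examples": [
  {
    "task_key": "upstream_condition_task",
    "outcome": "true"
  }
],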
"additionalProperties": false,
"required": [
"task_key"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.TaskEmailNotifications": {
"anyOf": [
{
"type": "object",
"properties": {
"no_alert_for_skipped_runs": {
href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/pipelines#EditPipeline">pipelines.EditPipeline</a>.</li> <li>Added <code>Schema</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/pipelines#PipelineSpec">pipelines.PipelineSpec</a>.</li> <li>[Breaking] Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service . New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateCredentialsRequest">workspace.CreateCredentialsRequest</a>.</li> <li>[Breaking] Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to type <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service.</li> <li>[Breaking] Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service . New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#DeleteCredentialsRequest">workspace.DeleteCredentialsRequest</a>.</li> <li>[Breaking] Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to return <code>any</code>.</li> <li>Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to type <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service.</li> <li>[Breaking] Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service . 
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetCredentialsRequest">workspace.GetCredentialsRequest</a>.</li> <li>Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to type <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service.</li> <li>[Breaking] Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetCredentialsResponse">workspace.GetCredentialsResponse</a>.</li> <li>[Breaking] Changed <code>List</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ListCredentialsResponse">workspace.ListCredentialsResponse</a>.</li> <li>Changed <code>List</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to type <code>List</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to type <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service.</li> <li>[Breaking] Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service to return <code>any</code>.</li> <li>[Breaking] Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GitCredentialsAPI">w.GitCredentials</a> workspace-level service . New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateCredentialsRequest">workspace.UpdateCredentialsRequest</a>.</li> <li>Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service . 
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateRepoRequest">workspace.CreateRepoRequest</a>.</li> <li>[Breaking] Changed <code>Create</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateRepoResponse">workspace.CreateRepoResponse</a>.</li> <li>[Breaking] Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <code>any</code>.</li> <li>Changed <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Delete</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetRepoResponse">workspace.GetRepoResponse</a>.</li> <li>Changed <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Get</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to return <code>any</code>.</li> <li>[Breaking] Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service . 
New request type is <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateRepoRequest">workspace.UpdateRepoRequest</a>.</li> <li>Changed <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service to type <code>Update</code> method for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#ReposAPI">w.Repos</a> workspace-level service.</li> <li>[Breaking] Changed <code>CredentialId</code> and <code>GitProvider</code> fields for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CreateCredentialsResponse">workspace.CreateCredentialsResponse</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#CredentialInfo">workspace.CredentialInfo</a> to be required.</li> <li>Changed <code>CredentialId</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#GetCredentialsResponse">workspace.GetCredentialsResponse</a> to be required.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckout">workspace.SparseCheckout</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>Changed <code>Patterns</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#SparseCheckoutUpdate">workspace.SparseCheckoutUpdate</a> to type <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#List">workspace.List</a>.</li> <li>[Breaking] Changed <code>GitProvider</code> field for <a href="https://pkg.go.dev/github.com/databricks/databricks-sdk-go/service/workspace#UpdateCredentialsRequest">workspace.UpdateCredentialsRequest</a> to be required.</li> </ul> <p>OpenAPI SHA: 0c86ea6dbd9a730c24ff0d4e509603e476955ac5, Date: 2024-10-02</p> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/80c963696b3c68e0b8aa4dc7dd55e16862a777c5"><code>80c9636</code></a> [Release] Release v0.48.0 (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1058">#1058</a>)</li> <li><a href="https://github.com/databricks/databricks-sdk-go/commit/6cecc224cbf7ceb85ca160d22fd27073899c2c40"><code>6cecc22</code></a> [Internal] Update SDK to latest OpenAPI spec (<a href="https://redirect.github.com/databricks/databricks-sdk-go/issues/1057">#1057</a>)</li> <li>See full diff in <a href="https://github.com/databricks/databricks-sdk-go/compare/v0.47.0...v0.48.0">compare view</a></li> </ul> </details> <br /> <details> <summary>Most Recent Ignore Conditions Applied to This Pull Request</summary> | Dependency Name | Ignore Conditions | | --- | --- | | github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] | </details> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.47.0&new-version=0.48.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. 
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
2024-10-07 13:21:05 +00:00
"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.",
"$ref": "#/$defs/bool"
},
"on_duration_warning_threshold_exceeded": {
"description": "A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_failure": {
"description": "A list of email addresses to be notified when a run unsuccessfully completes. A run is considered to have completed unsuccessfully if it ends with an `INTERNAL_ERROR` `life_cycle_state` or a `FAILED`, or `TIMED_OUT` result_state. If this is not specified on job creation, reset, or update the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_start": {
"description": "A list of email addresses to be notified when a run begins. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
},
"on_streaming_backlog_exceeded": {
"description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
"$ref": "#/$defs/slice/string"
},
"on_success": {
"description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
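For illustration, here is a minimal object that would validate against the email-notification definition above; the addresses are placeholders, not values from the source:

```json
{
  "no_alert_for_skipped_runs": true,
  "on_failure": ["data-eng-oncall@example.com"],
  "on_success": ["data-eng@example.com"]
}
```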
"jobs.TaskNotificationSettings": {
"anyOf": [
{
"type": "object",
"properties": {
"alert_on_last_attempt": {
"description": "If true, do not send notifications to recipients specified in `on_start` for the retried runs and do not send notifications to recipients specified in `on_failure` until the last retry of the run.",
"$ref": "#/$defs/bool"
},
"no_alert_for_canceled_runs": {
"description": "If true, do not send notifications to recipients specified in `on_failure` if the run is canceled.",
"$ref": "#/$defs/bool"
},
"no_alert_for_skipped_runs": {
"description": "If true, do not send notifications to recipients specified in `on_failure` if the run is skipped.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
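A sketch of a conforming `jobs.TaskNotificationSettings` value; the last field uses the variable-reference string form that every boolean in this schema also accepts (the variable name is hypothetical):

```json
{
  "alert_on_last_attempt": true,
  "no_alert_for_canceled_runs": true,
  "no_alert_for_skipped_runs": "${var.mute_skipped_run_alerts}"
}
```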
"jobs.TriggerSettings": {
"anyOf": [
{
"type": "object",
"properties": {
"file_arrival": {
"description": "File arrival trigger settings.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.FileArrivalTriggerConfiguration"
},
"pause_status": {
"description": "Whether this trigger is paused or not.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PauseStatus"
},
"periodic": {
"description": "Periodic trigger settings.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PeriodicTriggerConfiguration"
},
"table": {
"description": "Old table trigger settings name. Deprecated in favor of `table_update`.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration"
},
"table_update": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TableUpdateTriggerConfiguration"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
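An illustrative `jobs.TriggerSettings` value. The inner field layouts come from the referenced `jobs.PauseStatus` and `jobs.PeriodicTriggerConfiguration` definitions, which are not shown in this excerpt, so treat those field names as assumptions:

```json
{
  "pause_status": "UNPAUSED",
  "periodic": {
    "interval": 1,
    "unit": "HOURS"
  }
}
```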
"jobs.Webhook": {
"anyOf": [
{
"type": "object",
"properties": {
"id": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"id"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.WebhookNotifications": {
"anyOf": [
{
"type": "object",
"properties": {
"on_duration_warning_threshold_exceeded": {
"description": "An optional list of system notification IDs to call when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. A maximum of 3 destinations can be specified for the `on_duration_warning_threshold_exceeded` property.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
},
"on_failure": {
"description": "An optional list of system notification IDs to call when the run fails. A maximum of 3 destinations can be specified for the `on_failure` property.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
},
"on_start": {
"description": "An optional list of system notification IDs to call when the run starts. A maximum of 3 destinations can be specified for the `on_start` property.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
},
"on_streaming_backlog_exceeded": {
"description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
},
"on_success": {
"description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
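A conforming `jobs.WebhookNotifications` value, staying under the three-destination limit per property; the IDs are placeholders:

```json
{
  "on_start": [
    { "id": "<system-notification-id-1>" }
  ],
  "on_failure": [
    { "id": "<system-notification-id-1>" },
    { "id": "<system-notification-id-2>" }
  ]
}
```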
"ml.ExperimentTag": {
"anyOf": [
{
"type": "object",
"properties": {
"key": {
"description": "The tag key.",
"$ref": "#/$defs/string"
},
"value": {
"description": "The tag value.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ModelTag": {
"anyOf": [
{
"type": "object",
"properties": {
"key": {
"description": "The tag key.",
"$ref": "#/$defs/string"
},
"value": {
"description": "The tag value.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ModelVersion": {
"anyOf": [
{
"type": "object",
"properties": {
"creation_timestamp": {
"description": "Timestamp recorded when this `model_version` was created.",
"$ref": "#/$defs/int64"
},
"current_stage": {
"description": "Current stage for this `model_version`.",
"$ref": "#/$defs/string"
},
"description": {
"description": "Description of this `model_version`.",
"$ref": "#/$defs/string"
},
"last_updated_timestamp": {
"description": "Timestamp recorded when metadata for this `model_version` was last updated.",
"$ref": "#/$defs/int64"
},
"name": {
"description": "Unique name of the model",
"$ref": "#/$defs/string"
},
"run_id": {
"description": "MLflow run ID used when creating `model_version`, if `source` was generated by an\nexperiment run stored in MLflow tracking server.",
"$ref": "#/$defs/string"
},
"run_link": {
"description": "Run Link: Direct link to the run that generated this version",
"$ref": "#/$defs/string"
},
"source": {
"description": "URI indicating the location of the source model artifacts, used when creating `model_version`",
"$ref": "#/$defs/string"
},
"status": {
"description": "Current status of `model_version`",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ModelVersionStatus",
"enum": [
"PENDING_REGISTRATION",
"FAILED_REGISTRATION",
"READY"
]
},
"status_message": {
"description": "Details on current `status`, if it is pending or failed.",
"$ref": "#/$defs/string"
},
"tags": {
"description": "Tags: Additional metadata key-value pairs for this `model_version`.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/ml.ModelVersionTag"
},
"user_id": {
"description": "User that created this `model_version`.",
"$ref": "#/$defs/string"
},
"version": {
"description": "Model's version number.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
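A sketch of an `ml.ModelVersion` value using the `READY` status from the enum above; all concrete values are illustrative:

```json
{
  "name": "churn_model",
  "version": "2",
  "status": "READY",
  "creation_timestamp": 1696500000000,
  "user_id": "someone@example.com",
  "tags": [
    { "key": "validated", "value": "true" }
  ]
}
```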
"ml.ModelVersionStatus": {
"type": "string"
},
"ml.ModelVersionTag": {
"anyOf": [
{
"type": "object",
"properties": {
"key": {
"description": "The tag key.",
"$ref": "#/$defs/string"
},
"value": {
"description": "The tag value.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.CronTrigger": {
"anyOf": [
{
"type": "object",
"properties": {
"quartz_cron_schedule": {
"$ref": "#/$defs/string"
},
"timezone_id": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.DeploymentKind": {
"type": "string",
"description": "The deployment method that manages the pipeline:\n- BUNDLE: The pipeline is managed by a Databricks Asset Bundle.\n",
"enum": [
"BUNDLE"
]
},
"pipelines.FileLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"path": {
"description": "The absolute path of the file.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.Filters": {
"anyOf": [
{
"type": "object",
"properties": {
"exclude": {
"description": "Paths to exclude.",
"$ref": "#/$defs/slice/string"
},
"include": {
"description": "Paths to include.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.IngestionConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"report": {
"description": "Select tables from a specific source report.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ReportSpec"
},
"schema": {
"description": "Select tables from a specific source schema.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.SchemaSpec"
},
"table": {
"description": "Select tables from a specific source table.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpec"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
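A `pipelines.IngestionConfig` instance sets one of `report`, `schema`, or `table`. This sketch uses the variable-reference string form, on the assumption that the referenced `pipelines.SchemaSpec` definition follows the same `anyOf` convention as the rest of this schema; the variable name is hypothetical:

```json
{
  "schema": "${var.sales_schema_spec}"
}
```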
"pipelines.IngestionGatewayPipelineDefinition": {
"anyOf": [
{
"type": "object",
"properties": {
"connection_id": {
"description": "Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.",
"$ref": "#/$defs/string"
},
"gateway_storage_catalog": {
"description": "Required, Immutable. The name of the catalog for the gateway pipeline's storage location.",
"$ref": "#/$defs/string"
},
"gateway_storage_name": {
"description": "Optional. The Unity Catalog-compatible name for the gateway storage location.\nThis is the destination to use for the data that is extracted by the gateway.\nDelta Live Tables system will automatically create the storage location under the catalog and schema.\n",
"$ref": "#/$defs/string"
},
"gateway_storage_schema": {
"description": "Required, Immutable. The name of the schema for the gateway pipelines's storage location.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
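A conforming gateway definition; every concrete value here is a placeholder:

```json
{
  "connection_id": "<unity-catalog-connection-id>",
  "gateway_storage_catalog": "main",
  "gateway_storage_schema": "ingestion_gateway",
  "gateway_storage_name": "sqlserver_gateway_storage"
}
```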
"pipelines.IngestionPipelineDefinition": {
"anyOf": [
{
"type": "object",
"properties": {
"connection_name": {
"description": "Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the source. Specify either ingestion_gateway_id or connection_name.",
"$ref": "#/$defs/string"
},
"ingestion_gateway_id": {
"description": "Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate with the source. Specify either ingestion_gateway_id or connection_name.",
"$ref": "#/$defs/string"
},
"objects": {
"description": "Required. Settings specifying tables to replicate and the destination for the replicated tables.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig"
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in the pipeline.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
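A sketch of an ingestion pipeline definition that names a connection and replicates one schema and one table. The elements of `objects` use the variable-reference string form for their specs, assuming those referenced definitions follow the file-wide `anyOf` convention; all names are hypothetical:

```json
{
  "connection_name": "sqlserver_connection",
  "objects": [
    { "schema": "${var.sales_schema_spec}" },
    { "table": "${var.orders_table_spec}" }
  ]
}
```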
"pipelines.ManualTrigger": {
"anyOf": [
{
"type": "object",
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.NotebookLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"path": {
"description": "The absolute path of the notebook.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.Notifications": {
"anyOf": [
{
"type": "object",
"properties": {
"alerts": {
"description": "A list of alerts that trigger the sending of notifications to the configured\ndestinations. The supported alerts are:\n\n* `on-update-success`: A pipeline update completes successfully.\n* `on-update-failure`: Each time a pipeline update fails.\n* `on-update-fatal-failure`: A pipeline update fails with a non-retryable (fatal) error.\n* `on-flow-failure`: A single data flow fails.\n",
"$ref": "#/$defs/slice/string"
},
"email_recipients": {
"description": "A list of email addresses notified when a configured alert is triggered.\n",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
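An illustrative `pipelines.Notifications` value using two of the alert names listed in the description above; the recipient is a placeholder:

```json
{
  "alerts": [
    "on-update-failure",
    "on-flow-failure"
  ],
  "email_recipients": [
    "pipeline-ops@example.com"
  ]
}
```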
"pipelines.PipelineCluster": {
"anyOf": [
{
"type": "object",
"properties": {
"apply_policy_default_values": {
"description": "Note: This field won't be persisted. Only API users will check this field.",
"$ref": "#/$defs/bool"
},
"autoscale": {
"description": "Parameters needed in order to automatically scale clusters up and down based on load.\nNote: autoscaling works best with DB runtime versions 3.0 or later.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscale"
},
"aws_attributes": {
"description": "Attributes related to clusters running on Amazon Web Services.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AwsAttributes"
},
"azure_attributes": {
"description": "Attributes related to clusters running on Microsoft Azure.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.AzureAttributes"
},
"cluster_log_conf": {
"description": "The configuration for delivering spark logs to a long-term storage destination.\nOnly dbfs destinations are supported. Only one destination can be specified\nfor one cluster. If the conf is given, the logs will be delivered to the destination every\n`5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while\nthe destination of executor logs is `$destination/$clusterId/executor`.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.ClusterLogConf"
},
"custom_tags": {
"description": "Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS\ninstances and EBS volumes) with these tags in addition to `default_tags`. Notes:\n\n- Currently, Databricks allows at most 45 custom tags\n\n- Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags",
"$ref": "#/$defs/map/string"
},
"driver_instance_pool_id": {
"description": "The optional ID of the instance pool for the driver of the cluster belongs.\nThe pool cluster uses the instance pool with id (instance_pool_id) if the driver pool is not\nassigned.",
"$ref": "#/$defs/string"
},
"driver_node_type_id": {
"description": "The node type of the Spark driver.\nNote that this field is optional; if unset, the driver node type will be set as the same value\nas `node_type_id` defined above.",
"$ref": "#/$defs/string"
},
"enable_local_disk_encryption": {
"description": "Whether to enable local disk encryption for the cluster.",
"$ref": "#/$defs/bool"
},
"gcp_attributes": {
"description": "Attributes related to clusters running on Google Cloud Platform.\nIf not specified at cluster creation, a set of default values will be used.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.GcpAttributes"
},
"init_scripts": {
"description": "The configuration for storing init scripts. Any number of destinations can be specified. The scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script logs are sent to `\u003cdestination\u003e/\u003ccluster-ID\u003e/init_scripts`.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo"
},
"instance_pool_id": {
"description": "The optional ID of the instance pool to which the cluster belongs.",
"$ref": "#/$defs/string"
},
"label": {
"description": "A label for the cluster specification, either `default` to configure the default cluster, or `maintenance` to configure the maintenance cluster. This field is optional. The default value is `default`.",
"$ref": "#/$defs/string"
},
"node_type_id": {
"description": "This field encodes, through a single value, the resources available to each of\nthe Spark nodes in this cluster. For example, the Spark nodes can be provisioned\nand optimized for memory or compute intensive workloads. A list of available node\ntypes can be retrieved by using the :method:clusters/listNodeTypes API call.\n",
"$ref": "#/$defs/string"
},
"num_workers": {
"description": "Number of worker nodes that this cluster should have. A cluster has one Spark Driver\nand `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.\n\nNote: When reading the properties of a cluster, this field reflects the desired number\nof workers rather than the actual current number of workers. For instance, if a cluster\nis resized from 5 to 10 workers, this field will immediately be updated to reflect\nthe target size of 10 workers, whereas the workers listed in `spark_info` will gradually\nincrease from 5 to 10 as the new nodes are provisioned.",
"$ref": "#/$defs/int"
},
"policy_id": {
"description": "The ID of the cluster policy used to create the cluster if applicable.",
"$ref": "#/$defs/string"
},
"spark_conf": {
"description": "An object containing a set of optional, user-specified Spark configuration key-value pairs.\nSee :method:clusters/create for more details.\n",
"$ref": "#/$defs/map/string"
},
"spark_env_vars": {
"description": "An object containing a set of optional, user-specified environment variable key-value pairs.\nPlease note that key-value pair of the form (X,Y) will be exported as is (i.e.,\n`export X='Y'`) while launching the driver and workers.\n\nIn order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending\nthem to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all\ndefault databricks managed environmental variables are included as well.\n\nExample Spark environment variables:\n`{\"SPARK_WORKER_MEMORY\": \"28000m\", \"SPARK_LOCAL_DIRS\": \"/local_disk0\"}` or\n`{\"SPARK_DAEMON_JAVA_OPTS\": \"$SPARK_DAEMON_JAVA_OPTS -Dspark.shuffle.service.enabled=true\"}`",
"$ref": "#/$defs/map/string"
},
"ssh_public_keys": {
"description": "SSH public key contents that will be added to each Spark node in this cluster. The\ncorresponding private keys can be used to login with the user name `ubuntu` on port `2200`.\nUp to 10 keys can be specified.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
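A minimal `pipelines.PipelineCluster` sketch; the node type and tag values are illustrative, and the `autoscale` shape matches the `pipelines.PipelineClusterAutoscale` definition that follows:

```json
{
  "label": "default",
  "node_type_id": "i3.xlarge",
  "autoscale": {
    "min_workers": 1,
    "max_workers": 5,
    "mode": "ENHANCED"
  },
  "custom_tags": {
    "team": "data-platform"
  }
}
```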
"pipelines.PipelineClusterAutoscale": {
"anyOf": [
{
"type": "object",
"properties": {
"max_workers": {
"description": "The maximum number of workers to which the cluster can scale up when overloaded. `max_workers` must be strictly greater than `min_workers`.",
"$ref": "#/$defs/int"
},
"min_workers": {
"description": "The minimum number of workers the cluster can scale down to when underutilized.\nIt is also the initial number of workers the cluster will have after creation.",
"$ref": "#/$defs/int"
},
"mode": {
"description": "Databricks Enhanced Autoscaling optimizes cluster utilization by automatically\nallocating cluster resources based on workload volume, with minimal impact to\nthe data processing latency of your pipelines. Enhanced Autoscaling is available\nfor `updates` clusters only. The legacy autoscaling feature is used for `maintenance`\nclusters.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineClusterAutoscaleMode",
"enum": [
"ENHANCED",
"LEGACY"
]
}
},
"additionalProperties": false,
"required": [
"max_workers",
"min_workers"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.PipelineClusterAutoscaleMode": {
"type": "string"
},
"pipelines.PipelineDeployment": {
"anyOf": [
{
"type": "object",
"properties": {
"kind": {
"description": "The deployment method that manages the pipeline.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.DeploymentKind"
},
"metadata_file_path": {
"description": "The path to the file containing metadata about the deployment.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
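A conforming `pipelines.PipelineDeployment` value using the only documented `kind`, `BUNDLE`; the path is a placeholder:

```json
{
  "kind": "BUNDLE",
  "metadata_file_path": "<path-to-deployment-metadata.json>"
}
```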
"pipelines.PipelineLibrary": {
"anyOf": [
{
"type": "object",
"properties": {
"file": {
"description": "The path to a file that defines a pipeline and is stored in the Databricks Repos.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.FileLibrary"
},
"jar": {
"description": "URI of the jar to be installed. Currently only DBFS is supported.\n",
"$ref": "#/$defs/string"
},
"maven": {
"description": "Specification of a maven library to be installed.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.MavenLibrary"
},
"notebook": {
"description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.NotebookLibrary"
},
"whl": {
"description": "URI of the whl to be installed.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.PipelineTrigger": {
"anyOf": [
{
"type": "object",
"properties": {
"cron": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.CronTrigger"
},
"manual": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.ManualTrigger"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.ReportSpec": {
"anyOf": [
{
"type": "object",
"properties": {
"destination_catalog": {
"description": "Required. Destination catalog to store table.",
"$ref": "#/$defs/string"
},
"destination_schema": {
"description": "Required. Destination schema to store table.",
"$ref": "#/$defs/string"
},
"destination_table": {
"description": "Required. Destination table name. The pipeline fails if a table with that name already exists.",
"$ref": "#/$defs/string"
},
"source_url": {
"description": "Required. Report URL in the source system.",
"$ref": "#/$defs/string"
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.SchemaSpec": {
"anyOf": [
{
"type": "object",
"properties": {
"destination_catalog": {
"description": "Required. Destination catalog to store tables.",
"$ref": "#/$defs/string"
},
"destination_schema": {
"description": "Required. Destination schema to store tables in. Tables with the same name as the source tables are created in this destination schema. The pipeline fails If a table with the same name already exists.",
"$ref": "#/$defs/string"
},
"source_catalog": {
"description": "The source catalog name. Might be optional depending on the type of source.",
"$ref": "#/$defs/string"
},
"source_schema": {
"description": "Required. Schema name in the source database.",
"$ref": "#/$defs/string"
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the IngestionPipelineDefinition object.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.TableSpec": {
"anyOf": [
{
"type": "object",
"properties": {
"destination_catalog": {
"description": "Required. Destination catalog to store table.",
"$ref": "#/$defs/string"
},
"destination_schema": {
"description": "Required. Destination schema to store table.",
"$ref": "#/$defs/string"
},
"destination_table": {
"description": "Optional. Destination table name. The pipeline fails if a table with that name already exists. If not set, the source table name is used.",
"$ref": "#/$defs/string"
},
"source_catalog": {
"description": "Source catalog name. Might be optional depending on the type of source.",
"$ref": "#/$defs/string"
},
"source_schema": {
"description": "Schema name in the source database. Might be optional depending on the type of source.",
"$ref": "#/$defs/string"
},
"source_table": {
"description": "Required. Table name in the source database.",
"$ref": "#/$defs/string"
},
"table_configuration": {
"description": "Configuration settings to control the ingestion of tables. These settings override the table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.TableSpecificConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"primary_keys": {
"description": "The primary key of the table used to apply changes.",
"$ref": "#/$defs/slice/string"
},
"salesforce_include_formula_fields": {
"description": "If true, formula fields defined in the table are included in the ingestion. This setting is only valid for the Salesforce connector",
"$ref": "#/$defs/bool"
},
"scd_type": {
"description": "The SCD type to use to ingest the table.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.TableSpecificConfigScdType",
"enum": [
"SCD_TYPE_1",
"SCD_TYPE_2"
]
},
"sequence_by": {
"description": "The column names specifying the logical order of events in the source data. Delta Live Tables uses this sequencing to handle change events that arrive out of order.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.TableSpecificConfigScdType": {
"type": "string"
},
"serving.Ai21LabsConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"ai21labs_api_key": {
"$ref": "#/$defs/string"
},
"ai21labs_api_key_plaintext": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Nester <andrew.nester@databricks.com>
2024-10-07 13:21:05 +00:00
"serving.AiGatewayConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"guardrails": {
"description": "Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrails"
},
"inference_table_config": {
"description": "Configuration for payload logging using inference tables. Use these tables to monitor and audit data being sent to and received from model APIs and to improve model quality.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayInferenceTableConfig"
},
"rate_limits": {
"description": "Configuration for rate limits which can be set to limit endpoint traffic.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit"
},
"usage_tracking_config": {
"description": "Configuration to enable usage tracking using system tables. These tables allow you to monitor operational usage on endpoints and their associated costs.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayUsageTrackingConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayGuardrailParameters": {
"anyOf": [
{
"type": "object",
"properties": {
"invalid_keywords": {
"description": "List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword exists in the request or response content.",
"$ref": "#/$defs/slice/string"
},
"pii": {
"description": "Configuration for guardrail PII filter.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehavior"
},
"safety": {
"description": "Indicates whether the safety filter is enabled.",
"$ref": "#/$defs/bool"
},
"valid_topics": {
"description": "The list of allowed topics. Given a chat request, this guardrail flags the request if its topic is not in the allowed topics.",
"$ref": "#/$defs/slice/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayGuardrailPiiBehavior": {
"anyOf": [
{
"type": "object",
"properties": {
"behavior": {
"description": "Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailPiiBehaviorBehavior",
"enum": [
"NONE",
"BLOCK"
]
}
},
"additionalProperties": false,
"required": [
"behavior"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayGuardrailPiiBehaviorBehavior": {
"type": "string"
},
"serving.AiGatewayGuardrails": {
"anyOf": [
{
"type": "object",
"properties": {
"input": {
"description": "Configuration for input guardrail filters.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters"
},
"output": {
"description": "Configuration for output guardrail filters.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayGuardrailParameters"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayInferenceTableConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"catalog_name": {
"description": "The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the catalog name.",
"$ref": "#/$defs/string"
},
"enabled": {
"description": "Indicates whether the inference table is enabled.",
"$ref": "#/$defs/bool"
},
"schema_name": {
"description": "The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On update, you have to disable inference table first in order to change the schema name.",
"$ref": "#/$defs/string"
},
"table_name_prefix": {
"description": "The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table first in order to change the prefix name.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayRateLimit": {
"anyOf": [
{
"type": "object",
"properties": {
"calls": {
"description": "Used to specify how many calls are allowed for a key within the renewal_period.",
"$ref": "#/$defs/int"
},
"key": {
"description": "Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitKey",
"enum": [
"user",
"endpoint"
]
},
"renewal_period": {
"description": "Renewal period field for a rate limit. Currently, only 'minute' is supported.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimitRenewalPeriod",
"enum": [
"minute"
]
}
},
"additionalProperties": false,
"required": [
"calls",
"renewal_period"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayRateLimitKey": {
"type": "string"
},
"serving.AiGatewayRateLimitRenewalPeriod": {
"type": "string"
},
"serving.AiGatewayUsageTrackingConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"enabled": {
"description": "Whether to enable usage tracking.",
"$ref": "#/$defs/bool"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AmazonBedrockConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"aws_access_key_id": {
"description": "The Databricks secret key reference for an AWS access key ID with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
"$ref": "#/$defs/string"
},
"aws_access_key_id_plaintext": {
"description": "An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. You must provide an API key using one of the following fields: `aws_access_key_id` or `aws_access_key_id_plaintext`.",
"$ref": "#/$defs/string"
},
"aws_region": {
"description": "The AWS region to use. Bedrock has to be enabled there.",
"$ref": "#/$defs/string"
},
"aws_secret_access_key": {
"description": "The Databricks secret key reference for an AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services. If you prefer to paste your API key directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
"$ref": "#/$defs/string"
},
"aws_secret_access_key_plaintext": {
"description": "An AWS secret access key paired with the access key ID, with permissions to interact with Bedrock services provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `aws_secret_access_key`. You must provide an API key using one of the following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.",
"$ref": "#/$defs/string"
},
"bedrock_provider": {
"description": "The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfigBedrockProvider",
"enum": [
"anthropic",
"cohere",
"ai21labs",
"amazon"
]
}
},
"additionalProperties": false,
"required": [
"aws_region",
"bedrock_provider"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AmazonBedrockConfigBedrockProvider": {
"type": "string"
},
"serving.AnthropicConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"anthropic_api_key": {
"description": "The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
"$ref": "#/$defs/string"
},
"anthropic_api_key_plaintext": {
"description": "The Anthropic API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `anthropic_api_key`. You must provide an API key using one of the following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AutoCaptureConfigInput": {
"anyOf": [
{
"type": "object",
"properties": {
"catalog_name": {
"description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled.",
"$ref": "#/$defs/string"
},
"enabled": {
"description": "Indicates whether the inference table is enabled.",
"$ref": "#/$defs/bool"
},
"schema_name": {
"description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled.",
"$ref": "#/$defs/string"
},
"table_name_prefix": {
"description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.CohereConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"cohere_api_base": {
"description": "This is an optional field to provide a customized base URL for the Cohere API. \nIf left unspecified, the standard Cohere base URL is used.\n",
"$ref": "#/$defs/string"
},
"cohere_api_key": {
"description": "The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
"$ref": "#/$defs/string"
},
"cohere_api_key_plaintext": {
"description": "The Cohere API key provided as a plaintext string. If you prefer to reference your key using Databricks Secrets, see `cohere_api_key`. You must provide an API key using one of the following fields: `cohere_api_key` or `cohere_api_key_plaintext`.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.DatabricksModelServingConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"databricks_api_token": {
"description": "The Databricks secret key reference for a Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model.\nIf you prefer to paste your API key directly, see `databricks_api_token_plaintext`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
"$ref": "#/$defs/string"
},
"databricks_api_token_plaintext": {
"description": "The Databricks API token that corresponds to a user or service\nprincipal with Can Query access to the model serving endpoint pointed to by this external model provided as a plaintext string.\nIf you prefer to reference your key using Databricks Secrets, see `databricks_api_token`.\nYou must provide an API key using one of the following fields: `databricks_api_token` or `databricks_api_token_plaintext`.\n",
"$ref": "#/$defs/string"
},
"databricks_workspace_url": {
"description": "The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.\n",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"databricks_workspace_url"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.EndpointCoreConfigInput": {
"anyOf": [
{
"type": "object",
"properties": {
"auto_capture_config": {
"description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AutoCaptureConfigInput"
},
"served_entities": {
"description": "A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served entities.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput"
},
"served_models": {
"description": "(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A serving endpoint can have up to 15 served models.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput"
},
"traffic_config": {
"description": "The traffic config defining how invocations to the serving endpoint should be routed.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.TrafficConfig"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.EndpointTag": {
"anyOf": [
{
"type": "object",
"properties": {
"key": {
"description": "Key field for a serving endpoint tag.",
"$ref": "#/$defs/string"
},
"value": {
"description": "Optional value field for a serving endpoint tag.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"key"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ExternalModel": {
"anyOf": [
{
"type": "object",
"properties": {
"ai21labs_config": {
"description": "AI21Labs Config. Only required if the provider is 'ai21labs'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Ai21LabsConfig"
},
"amazon_bedrock_config": {
"description": "Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AmazonBedrockConfig"
},
"anthropic_config": {
"description": "Anthropic Config. Only required if the provider is 'anthropic'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AnthropicConfig"
},
"cohere_config": {
"description": "Cohere Config. Only required if the provider is 'cohere'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.CohereConfig"
},
"databricks_model_serving_config": {
"description": "Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.DatabricksModelServingConfig"
},
"google_cloud_vertex_ai_config": {
"description": "Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.GoogleCloudVertexAiConfig"
},
"name": {
"description": "The name of the external model.",
"$ref": "#/$defs/string"
},
"openai_config": {
"description": "OpenAI Config. Only required if the provider is 'openai'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.OpenAiConfig"
},
"palm_config": {
"description": "PaLM Config. Only required if the provider is 'palm'.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.PaLmConfig"
},
"provider": {
"description": "The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic',\n'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.\",\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModelProvider",
"enum": [
"ai21labs",
"anthropic",
"amazon-bedrock",
"cohere",
"databricks-model-serving",
"google-cloud-vertex-ai",
"openai",
"palm"
]
},
"task": {
"description": "The task type of the external model.",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false,
"required": [
"name",
"provider",
"task"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ExternalModelProvider": {
"type": "string"
},
"serving.GoogleCloudVertexAiConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"private_key": {
"$ref": "#/$defs/string"
},
"private_key_plaintext": {
"$ref": "#/$defs/string"
},
"project_id": {
"$ref": "#/$defs/string"
},
"region": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.OpenAiConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"microsoft_entra_client_id": {
"$ref": "#/$defs/string"
},
"microsoft_entra_client_secret": {
"$ref": "#/$defs/string"
},
"microsoft_entra_client_secret_plaintext": {
"$ref": "#/$defs/string"
},
"microsoft_entra_tenant_id": {
"$ref": "#/$defs/string"
},
"openai_api_base": {
"$ref": "#/$defs/string"
},
"openai_api_key": {
"$ref": "#/$defs/string"
},
"openai_api_key_plaintext": {
"$ref": "#/$defs/string"
},
"openai_api_type": {
"$ref": "#/$defs/string"
},
"openai_api_version": {
"$ref": "#/$defs/string"
},
"openai_deployment_name": {
"$ref": "#/$defs/string"
},
"openai_organization": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.PaLmConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"palm_api_key": {
"$ref": "#/$defs/string"
},
"palm_api_key_plaintext": {
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.RateLimit": {
"anyOf": [
{
"type": "object",
"properties": {
"calls": {
"description": "Used to specify how many calls are allowed for a key within the renewal_period.",
"$ref": "#/$defs/int"
},
"key": {
"description": "Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitKey",
"enum": [
"user",
"endpoint"
]
},
"renewal_period": {
"description": "Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimitRenewalPeriod",
"enum": [
"minute"
]
}
},
"additionalProperties": false,
"required": [
"calls",
"renewal_period"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.RateLimitKey": {
"type": "string"
},
"serving.RateLimitRenewalPeriod": {
"type": "string"
},
"serving.Route": {
"anyOf": [
{
"type": "object",
"properties": {
"served_model_name": {
"description": "The name of the served model this route configures traffic for.",
"$ref": "#/$defs/string"
},
"traffic_percentage": {
"description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive.",
"$ref": "#/$defs/int"
}
},
"additionalProperties": false,
"required": [
"served_model_name",
"traffic_percentage"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ServedEntityInput": {
"anyOf": [
{
"type": "object",
"properties": {
"entity_name": {
"description": "The name of the entity to be served. The entity may be a model in the Databricks Model Registry, a model in the Unity Catalog (UC),\nor a function of type FEATURE_SPEC in the UC. If it is a UC object, the full name of the object should be given in the form of\n__catalog_name__.__schema_name__.__model_name__.\n",
"$ref": "#/$defs/string"
},
"entity_version": {
"description": "The version of the model in Databricks Model Registry to be served or empty if the entity is a FEATURE_SPEC.",
"$ref": "#/$defs/string"
},
"environment_vars": {
"description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity.\nNote: this is an experimental feature and subject to change. \nExample entity environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
"$ref": "#/$defs/map/string"
},
"external_model": {
"description": "The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled)\ncan be specified with the latter set being used for custom model serving for a Databricks registered model. For an existing endpoint with external_model,\nit cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add external_model later.\nThe task type of all external models within an endpoint must be the same.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ExternalModel"
},
"instance_profile_arn": {
"description": "ARN of the instance profile that the served entity uses to access AWS resources.",
"$ref": "#/$defs/string"
},
"max_provisioned_throughput": {
"description": "The maximum tokens per second that the endpoint can scale up to.",
"$ref": "#/$defs/int"
},
"min_provisioned_throughput": {
"description": "The minimum tokens per second that the endpoint can scale down to.",
"$ref": "#/$defs/int"
},
"name": {
"description": "The name of a served entity. It must be unique across an endpoint. A served entity name can consist of alphanumeric characters, dashes, and underscores.\nIf not specified for an external model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if not specified for other\nentities, it defaults to \u003centity-name\u003e-\u003centity-version\u003e.\n",
"$ref": "#/$defs/string"
},
"scale_to_zero_enabled": {
"description": "Whether the compute resources for the served entity should scale down to zero.",
"$ref": "#/$defs/bool"
},
"workload_size": {
"description": "The workload size of the served entity. The workload size corresponds to a range of provisioned concurrency that the compute autoscales between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size is 0.\n",
"$ref": "#/$defs/string"
},
"workload_type": {
"description": "The workload type of the served entity. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
"$ref": "#/$defs/string"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ServedModelInput": {
"anyOf": [
{
"type": "object",
"properties": {
"environment_vars": {
"description": "An object containing a set of optional, user-specified environment variable key-value pairs used for serving this model.\nNote: this is an experimental feature and subject to change. \nExample model environment variables that refer to Databricks secrets: `{\"OPENAI_API_KEY\": \"{{secrets/my_scope/my_key}}\", \"DATABRICKS_TOKEN\": \"{{secrets/my_scope2/my_key2}}\"}`",
"$ref": "#/$defs/map/string"
},
"instance_profile_arn": {
"description": "ARN of the instance profile that the served model will use to access AWS resources.",
"$ref": "#/$defs/string"
},
"max_provisioned_throughput": {
"description": "The maximum tokens per second that the endpoint can scale up to.",
"$ref": "#/$defs/int"
},
"min_provisioned_throughput": {
"description": "The minimum tokens per second that the endpoint can scale down to.",
"$ref": "#/$defs/int"
},
"model_name": {
"description": "The name of the model in Databricks Model Registry to be served or if the model resides in Unity Catalog, the full name of model,\nin the form of __catalog_name__.__schema_name__.__model_name__.\n",
"$ref": "#/$defs/string"
},
"model_version": {
"description": "The version of the model in Databricks Model Registry or Unity Catalog to be served.",
"$ref": "#/$defs/string"
},
"name": {
"description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to \u003cmodel-name\u003e-\u003cmodel-version\u003e.\nA served model name can consist of alphanumeric characters, dashes, and underscores.\n",
"$ref": "#/$defs/string"
},
"scale_to_zero_enabled": {
"description": "Whether the compute resources for the served model should scale down to zero.",
"$ref": "#/$defs/bool"
},
"workload_size": {
"description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between.\nA single unit of provisioned concurrency can process one request at a time.\nValid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency).\nIf scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadSize",
"enum": [
"Small",
"Medium",
"Large"
]
},
"workload_type": {
"description": "The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is\n\"CPU\". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others.\nSee the available [GPU types](https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types).\n",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInputWorkloadType",
"enum": [
"CPU",
"GPU_SMALL",
"GPU_MEDIUM",
"GPU_LARGE",
"MULTIGPU_MEDIUM"
]
}
},
"additionalProperties": false,
"required": [
"model_name",
"model_version",
"scale_to_zero_enabled"
]
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ServedModelInputWorkloadSize": {
"type": "string"
},
"serving.ServedModelInputWorkloadType": {
"type": "string"
},
"serving.TrafficConfig": {
"anyOf": [
{
"type": "object",
"properties": {
"routes": {
"description": "The list of routes that define traffic to each served entity.",
"$ref": "#/$defs/slice/github.com/databricks/databricks-sdk-go/service/serving.Route"
}
},
"additionalProperties": false
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
}
}
}
},
"int": {
"anyOf": [
{
"type": "integer"
},
{
"type": "string",
"pattern": "\\$\\{(resources(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(bundle(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(workspace(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(artifacts(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"int64": {
"anyOf": [
{
"type": "integer"
},
{
"type": "string",
"pattern": "\\$\\{(resources(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(bundle(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(workspace(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(artifacts(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"interface": {},
"map": {
"github.com": {
"databricks": {
"cli": {
"bundle": {
"config": {
"resources.Cluster": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Cluster"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Dashboard": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Dashboard"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Job": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Job"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.MlflowExperiment": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowExperiment"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.MlflowModel": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.MlflowModel"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.ModelServingEndpoint": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.ModelServingEndpoint"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Pipeline": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Pipeline"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.QualityMonitor": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.QualityMonitor"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.RegisteredModel": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.RegisteredModel"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Schema": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Schema"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"variable.TargetVariable": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.TargetVariable"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"variable.Variable": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/variable.Variable"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
},
"config.Artifact": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Artifact"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Command": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Command"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"config.Target": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Target"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
}
}
}
},
"string": {
"anyOf": [
{
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/string"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
},
"slice": {
"github.com": {
"databricks": {
"cli": {
"bundle": {
"config": {
"resources.Grant": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Grant"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"resources.Permission": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config/resources.Permission"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
},
"config.ArtifactFile": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.ArtifactFile"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
}
},
"databricks-sdk-go": {
"service": {
"catalog.MonitorMetric": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/catalog.MonitorMetric"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.InitScriptInfo": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"compute.Library": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Library"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobCluster": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobCluster"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobEnvironment": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobEnvironment"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobParameterDefinition": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobParameterDefinition"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.JobsHealthRule": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobsHealthRule"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.SqlTaskSubscription": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.SqlTaskSubscription"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Task": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Task"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.TaskDependency": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.TaskDependency"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"jobs.Webhook": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.Webhook"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ExperimentTag": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ExperimentTag"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ModelTag": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ModelTag"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ModelVersion": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ModelVersion"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"ml.ModelVersionTag": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/ml.ModelVersionTag"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.IngestionConfig": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.IngestionConfig"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.Notifications": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.Notifications"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.PipelineCluster": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineCluster"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"pipelines.PipelineLibrary": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.PipelineLibrary"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.AiGatewayRateLimit": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.AiGatewayRateLimit"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.EndpointTag": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.EndpointTag"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.RateLimit": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.RateLimit"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.Route": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.Route"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ServedEntityInput": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedEntityInput"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
},
"serving.ServedModelInput": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
}
}
}
},
"string": {
"anyOf": [
{
"type": "array",
"items": {
"$ref": "#/$defs/string"
}
},
{
"type": "string",
"pattern": "\\$\\{(var(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)+)\\}"
}
]
}
},
"string": {
"type": "string"
}
},
"type": "object",
"properties": {
"artifacts": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Artifact"
},
"bundle": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Bundle"
},
"experimental": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Experimental"
},
"include": {
"$ref": "#/$defs/slice/string"
},
"permissions": {
"$ref": "#/$defs/slice/github.com/databricks/cli/bundle/config/resources.Permission"
},
"presets": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Presets"
},
"resources": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Resources"
},
"run_as": {
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.JobRunAs"
},
"sync": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Sync"
},
"targets": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config.Target"
},
"variables": {
"$ref": "#/$defs/map/github.com/databricks/cli/bundle/config/variable.Variable"
},
"workspace": {
"$ref": "#/$defs/github.com/databricks/cli/bundle/config.Workspace"
}
},
"additionalProperties": false
}