Disable OpenAPI annotations with broken markdown

Ilya Kuznetsov 2025-02-24 19:03:31 +01:00
parent 6b0b3441a8
commit baf2f7eccd
GPG Key ID: 91F3DDCF5D21CDDF
3 changed files with 42 additions and 29 deletions
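
For context: the OpenAPI-sourced descriptions for the fields touched here contain markdown that renders badly (unterminated inline code spans, raw `<needs content added>` placeholders), so this change suppresses them via annotation overrides and regenerates the reference docs and JSON schema. The overrides YAML in the second diff keys on the Go SDK type and field name; judging by the first and third diffs, a `PLACEHOLDER` description makes the generators drop the OpenAPI text from both outputs. A minimal sketch of the override pattern, mirroring one of the entries added below (type and field names taken from the diff; the file path is not shown in this view):

github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
  # replace the broken OpenAPI description with an empty placeholder
  "log_analytics_primary_key":
    "description": |-
      PLACEHOLDER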

View File

@@ -899,11 +899,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
- - `log_analytics_primary_key`
- String
- <needs content added>
-
- - `log_analytics_workspace_id`
- String
- <needs content added>
-
:::
@@ -1115,7 +1115,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
- - `abfss`
- Map
- destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#clustersnameinit_scriptsabfss).
- See [\_](#clustersnameinit_scriptsabfss).
- - `dbfs`
- Map
@@ -1148,8 +1148,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
**`Type: Map`**
destination needs to be provided. e.g.
`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
@@ -1928,7 +1927,7 @@ In this minimal environment spec, only pip dependencies are supported.
- - `dependencies`
- Sequence
- List of pip dependencies, as supported by the version of pip in this environment. Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in Databricks), <vcs project url> E.g. dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"]
-
:::
@@ -2379,11 +2378,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
- - `log_analytics_primary_key`
- String
- <needs content added>
-
- - `log_analytics_workspace_id`
- String
- <needs content added>
-
:::
@@ -2595,7 +2594,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
- - `abfss`
- Map
- destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#jobsnamejob_clustersnew_clusterinit_scriptsabfss).
- See [\_](#jobsnamejob_clustersnew_clusterinit_scriptsabfss).
- - `dbfs`
- Map
@@ -2628,8 +2627,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
**`Type: Map`**
destination needs to be provided. e.g.
`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
@@ -3801,11 +3799,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
- - `log_analytics_primary_key`
- String
- <needs content added>
-
- - `log_analytics_workspace_id`
- String
- <needs content added>
-
:::
@@ -4017,7 +4015,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
- - `abfss`
- Map
- destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#jobsnametasksnew_clusterinit_scriptsabfss).
- See [\_](#jobsnametasksnew_clusterinit_scriptsabfss).
- - `dbfs`
- Map
@@ -4050,8 +4048,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
**`Type: Map`**
destination needs to be provided. e.g.
`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
@@ -6771,11 +6768,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
- - `log_analytics_primary_key`
- String
- <needs content added>
-
- - `log_analytics_workspace_id`
- String
- <needs content added>
-
:::
@@ -6938,7 +6935,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
- - `abfss`
- Map
- destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#pipelinesnameclustersinit_scriptsabfss).
- See [\_](#pipelinesnameclustersinit_scriptsabfss).
- - `dbfs`
- Map
@@ -6971,8 +6968,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
**`Type: Map`**
destination needs to be provided. e.g.
`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
@@ -8015,7 +8011,7 @@ Configuration for monitoring inference logs.
- - `granularities`
- Sequence
- Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
-
- - `label_col`
- String
@@ -8163,7 +8159,7 @@ Configuration for monitoring time series tables.
- - `granularities`
- Sequence
- Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
-
- - `timestamp_col`
- String

View File

@@ -566,3 +566,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
  "model_version":
    "description": |-
      PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
  "abfss":
    "description": |-
      PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.Environment:
  "dependencies":
    "description": |-
      PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
  "granularities":
    "description": |-
      PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
  "granularities":
    "description": |-
      PLACEHOLDER
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
  "log_analytics_primary_key":
    "description": |-
      PLACEHOLDER
  "log_analytics_workspace_id":
    "description": |-
      PLACEHOLDER

View File

@@ -2094,7 +2094,6 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"$ref": "#/$defs/slice/string"
},
"label_col": {
@@ -2251,7 +2250,6 @@
"type": "object",
"properties": {
"granularities": {
"description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
"$ref": "#/$defs/slice/string"
},
"timestamp_col": {
@@ -2732,7 +2730,6 @@
"$ref": "#/$defs/string"
},
"dependencies": {
"description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
"$ref": "#/$defs/slice/string"
}
},
@@ -2828,7 +2825,6 @@
"type": "object",
"properties": {
"abfss": {
"description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
},
"dbfs": {
@@ -2936,11 +2932,9 @@
"type": "object",
"properties": {
"log_analytics_primary_key": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
},
"log_analytics_workspace_id": {
"description": "\u003cneeds content added\u003e",
"$ref": "#/$defs/string"
}
},