mirror of https://github.com/databricks/cli.git
Disable OpenAPI annotations with broken markdown
parent 6b0b3441a8
commit baf2f7eccd

@@ -899,11 +899,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
 
    - - `log_analytics_primary_key`
      - String
-     - <needs content added>
+     -
 
    - - `log_analytics_workspace_id`
      - String
-     - <needs content added>
+     -
 
 :::
 
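
The two fields touched here live under a cluster's `azure_attributes`. For orientation, a minimal sketch of how a bundle could set them, assuming a hypothetical cluster resource and bundle variables for the workspace ID and key:

```yaml
# Sketch only: resource name, variables, and node settings are illustrative.
resources:
  clusters:
    my_cluster:
      spark_version: 15.4.x-scala2.12
      node_type_id: Standard_DS3_v2
      num_workers: 1
      azure_attributes:
        log_analytics_info:
          log_analytics_workspace_id: ${var.log_analytics_workspace_id}
          log_analytics_primary_key: ${var.log_analytics_primary_key}
```
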
@@ -1115,7 +1115,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
    - - `abfss`
      - Map
-     - destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#clustersnameinit_scriptsabfss).
+     - See [\_](#clustersnameinit_scriptsabfss).
 
    - - `dbfs`
      - Map
@@ -1148,8 +1148,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
 **`Type: Map`**
 
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
+
 
 
 
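
The disabled description documented the shape of an `abfss` init script source. A minimal sketch of the same thing in bundle YAML, with the container, account, and script path left as placeholders:

```yaml
# Sketch only: the cluster name and abfss URI components are placeholders.
resources:
  clusters:
    my_cluster:
      init_scripts:
        - abfss:
            destination: abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>/init.sh
```
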
@@ -1928,7 +1927,7 @@ In this minimal environment spec, only pip dependencies are supported.
 
    - - `dependencies`
      - Sequence
-     - List of pip dependencies, as supported by the version of pip in this environment. Each dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/ Allowed dependency could be <requirement specifier>, <archive url/path>, <local project path>(WSFS or Volumes in Databricks), <vcs project url> E.g. dependencies: ["foo==0.0.1", "-r /Workspace/test/requirements.txt"]
+     -
 
 :::
 
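
The removed text described `dependencies` as pip requirement-file lines (requirement specifiers, archive URLs or paths, local WSFS or Volumes project paths, VCS URLs). A minimal sketch of an environment spec on a job, reusing the example lines from the removed description; the job name and environment key are illustrative:

```yaml
# Sketch only: job name and environment_key are illustrative.
resources:
  jobs:
    my_job:
      environments:
        - environment_key: default
          spec:
            client: "1"
            dependencies:
              - foo==0.0.1
              - -r /Workspace/test/requirements.txt
```
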
@@ -2379,11 +2378,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
 
    - - `log_analytics_primary_key`
      - String
-     - <needs content added>
+     -
 
    - - `log_analytics_workspace_id`
      - String
-     - <needs content added>
+     -
 
 :::
 
@@ -2595,7 +2594,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
    - - `abfss`
      - Map
-     - destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#jobsnamejob_clustersnew_clusterinit_scriptsabfss).
+     - See [\_](#jobsnamejob_clustersnew_clusterinit_scriptsabfss).
 
    - - `dbfs`
      - Map
@@ -2628,8 +2627,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
 **`Type: Map`**
 
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
+
 
 
 
@@ -3801,11 +3799,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
 
    - - `log_analytics_primary_key`
      - String
-     - <needs content added>
+     -
 
    - - `log_analytics_workspace_id`
      - String
-     - <needs content added>
+     -
 
 :::
 
@@ -4017,7 +4015,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
    - - `abfss`
      - Map
-     - destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#jobsnametasksnew_clusterinit_scriptsabfss).
+     - See [\_](#jobsnametasksnew_clusterinit_scriptsabfss).
 
    - - `dbfs`
      - Map
@@ -4050,8 +4048,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
 **`Type: Map`**
 
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
+
 
 
 
@@ -6771,11 +6768,11 @@ Defines values necessary to configure and run Azure Log Analytics agent
 
    - - `log_analytics_primary_key`
      - String
-     - <needs content added>
+     -
 
    - - `log_analytics_workspace_id`
      - String
-     - <needs content added>
+     -
 
 :::
 
@@ -6938,7 +6935,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
    - - `abfss`
      - Map
-     - destination needs to be provided. e.g. `{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }. See [\_](#pipelinesnameclustersinit_scriptsabfss).
+     - See [\_](#pipelinesnameclustersinit_scriptsabfss).
 
    - - `dbfs`
      - Map
@@ -6971,8 +6968,7 @@ The configuration for storing init scripts. Any number of destinations can be sp
 
 **`Type: Map`**
 
-destination needs to be provided. e.g.
-`{ "abfss" : { "destination" : "abfss://<container-name>@<storage-account-name>.dfs.core.windows.net/<directory-name>" } }
+
 
 
 
@@ -8015,7 +8011,7 @@ Configuration for monitoring inference logs.
 
    - - `granularities`
      - Sequence
-     - Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
+     -
 
    - - `label_col`
      - String
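
The `granularities` values named in the removed description ("5 minutes" through "1 year") are set on the monitor's `inference_log` block. A minimal sketch, assuming a hypothetical quality monitor resource with placeholder table and column names:

```yaml
# Sketch only: monitor name, table, and columns are placeholders.
resources:
  quality_monitors:
    my_inference_monitor:
      table_name: main.default.model_inferences
      assets_dir: /Workspace/Shared/monitoring
      output_schema_name: main.monitoring
      inference_log:
        granularities: ["1 day"]
        timestamp_col: ts
        model_id_col: model_id
        prediction_col: prediction
        problem_type: PROBLEM_TYPE_REGRESSION
```
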
@@ -8163,7 +8159,7 @@ Configuration for monitoring time series tables.
 
    - - `granularities`
      - Sequence
-     - Granularities for aggregating data into time windows based on their timestamp. Currently the following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, ``"1 day"``, ``"<n> week(s)"``, ``"1 month"``, ``"1 year"``}.
+     -
 
    - - `timestamp_col`
      - String
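
The same `granularities` field appears on time series monitors; a matching sketch with placeholder names:

```yaml
# Sketch only: monitor name, table, and timestamp column are placeholders.
resources:
  quality_monitors:
    my_time_series_monitor:
      table_name: main.default.events
      assets_dir: /Workspace/Shared/monitoring
      output_schema_name: main.monitoring
      time_series:
        granularities: ["30 minutes", "1 day"]
        timestamp_col: event_ts
```
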
@@ -566,3 +566,26 @@ github.com/databricks/databricks-sdk-go/service/serving.ServedModelInput:
   "model_version":
     "description": |-
       PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/compute.InitScriptInfo:
+  "abfss":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/compute.Environment:
+  "dependencies":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/catalog.MonitorInferenceLog:
+  "granularities":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/catalog.MonitorTimeSeries:
+  "granularities":
+    "description": |-
+      PLACEHOLDER
+github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
+  "log_analytics_primary_key":
+    "description": |-
+      PLACEHOLDER
+  "log_analytics_workspace_id":
+    "description": |-
+      PLACEHOLDER
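
Each `PLACEHOLDER` entry suppresses the broken OpenAPI text so the docs generator emits an empty description instead; a curated description can later replace the placeholder in the same slot. A hypothetical filled-in override (the wording below is illustrative, not part of this commit):

```yaml
github.com/databricks/databricks-sdk-go/service/compute.LogAnalyticsInfo:
  "log_analytics_workspace_id":
    "description": |-
      The ID of the Azure Log Analytics workspace that receives cluster logs.
```
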
@@ -2094,7 +2094,6 @@
       "type": "object",
       "properties": {
         "granularities": {
-          "description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
           "$ref": "#/$defs/slice/string"
         },
         "label_col": {
@@ -2251,7 +2250,6 @@
       "type": "object",
       "properties": {
         "granularities": {
-          "description": "Granularities for aggregating data into time windows based on their timestamp. Currently the following static\ngranularities are supported:\n{``\"5 minutes\"``, ``\"30 minutes\"``, ``\"1 hour\"``, ``\"1 day\"``, ``\"\u003cn\u003e week(s)\"``, ``\"1 month\"``, ``\"1 year\"``}.\n",
           "$ref": "#/$defs/slice/string"
         },
         "timestamp_col": {
@@ -2732,7 +2730,6 @@
           "$ref": "#/$defs/string"
         },
         "dependencies": {
-          "description": "List of pip dependencies, as supported by the version of pip in this environment.\nEach dependency is a pip requirement file line https://pip.pypa.io/en/stable/reference/requirements-file-format/\nAllowed dependency could be \u003crequirement specifier\u003e, \u003carchive url/path\u003e, \u003clocal project path\u003e(WSFS or Volumes in Databricks), \u003cvcs project url\u003e\nE.g. dependencies: [\"foo==0.0.1\", \"-r /Workspace/test/requirements.txt\"]",
           "$ref": "#/$defs/slice/string"
         }
       },
@@ -2828,7 +2825,6 @@
       "type": "object",
       "properties": {
         "abfss": {
-          "description": "destination needs to be provided. e.g.\n`{ \"abfss\" : { \"destination\" : \"abfss://\u003ccontainer-name\u003e@\u003cstorage-account-name\u003e.dfs.core.windows.net/\u003cdirectory-name\u003e\" } }",
           "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/compute.Adlsgen2Info"
         },
         "dbfs": {
@@ -2936,11 +2932,9 @@
       "type": "object",
       "properties": {
         "log_analytics_primary_key": {
-          "description": "\u003cneeds content added\u003e",
           "$ref": "#/$defs/string"
         },
         "log_analytics_workspace_id": {
-          "description": "\u003cneeds content added\u003e",
           "$ref": "#/$defs/string"
         }
       },