From 8355d562f1a22118899e6e69baba9da245ffa9bc Mon Sep 17 00:00:00 2001 From: Elastic Machine Date: Wed, 31 Jul 2024 23:45:10 +1000 Subject: [PATCH] Auto-generated code for main (#2608) Co-authored-by: Quentin Pradet --- elasticsearch/_async/client/__init__.py | 67 ++-- elasticsearch/_async/client/cat.py | 95 ++--- elasticsearch/_async/client/cluster.py | 35 +- elasticsearch/_async/client/enrich.py | 10 +- elasticsearch/_async/client/esql.py | 35 +- elasticsearch/_async/client/indices.py | 125 ++++--- elasticsearch/_async/client/license.py | 6 +- elasticsearch/_async/client/ml.py | 20 +- elasticsearch/_async/client/query_rules.py | 14 +- elasticsearch/_async/client/security.py | 384 ++++++++++++++++++--- elasticsearch/_async/client/tasks.py | 3 +- elasticsearch/_async/client/transform.py | 124 +++---- elasticsearch/_sync/client/__init__.py | 67 ++-- elasticsearch/_sync/client/cat.py | 95 ++--- elasticsearch/_sync/client/cluster.py | 35 +- elasticsearch/_sync/client/enrich.py | 10 +- elasticsearch/_sync/client/esql.py | 35 +- elasticsearch/_sync/client/indices.py | 125 ++++--- elasticsearch/_sync/client/license.py | 6 +- elasticsearch/_sync/client/ml.py | 20 +- elasticsearch/_sync/client/query_rules.py | 14 +- elasticsearch/_sync/client/security.py | 384 ++++++++++++++++++--- elasticsearch/_sync/client/tasks.py | 3 +- elasticsearch/_sync/client/transform.py | 124 +++---- 24 files changed, 1280 insertions(+), 556 deletions(-) diff --git a/elasticsearch/_async/client/__init__.py b/elasticsearch/_async/client/__init__.py index 5b2979f154..e0f820c6d4 100644 --- a/elasticsearch/_async/client/__init__.py +++ b/elasticsearch/_async/client/__init__.py @@ -638,8 +638,9 @@ async def bulk( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs multiple indexing or delete operations in a single API call. This reduces - overhead and can greatly increase indexing speed. + Bulk index or delete documents. Performs multiple indexing or delete operations + in a single API call. 
This reduces overhead and can greatly increase indexing + speed. ``_ @@ -995,9 +996,9 @@ async def create( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -1101,7 +1102,7 @@ async def delete( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a JSON document from the specified index. + Delete a document. Removes a JSON document from the specified index. ``_ @@ -1225,7 +1226,7 @@ async def delete_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes documents that match the specified query. + Delete documents. Deletes documents that match the specified query. ``_ @@ -1451,7 +1452,7 @@ async def delete_script( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a stored script or search template. + Delete a script or search template. Deletes a stored script or search template. ``_ @@ -1519,7 +1520,7 @@ async def exists( ] = None, ) -> HeadApiResponse: """ - Checks if a document in an index exists. + Check a document. Checks if a specified document exists. ``_ @@ -1620,7 +1621,7 @@ async def exists_source( ] = None, ) -> HeadApiResponse: """ - Checks if a document's `_source` is stored. + Check for a document source. Checks if a document's `_source` is stored. ``_ @@ -1720,8 +1721,8 @@ async def explain( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about why a specific document matches (or doesn’t match) - a query. + Explain a document match result. 
Returns information about why a specific document + matches, or doesn’t match, a query. ``_ @@ -1959,7 +1960,8 @@ async def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns a document. + Get a document by its ID. Retrieves the document with the specified ID from an + index. ``_ @@ -2050,7 +2052,7 @@ async def get_script( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a stored script or search template. + Get a script or search template. Retrieves a stored script or search template. ``_ @@ -2182,7 +2184,7 @@ async def get_source( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the source of a document. + Get a document's source. Returns the source of a document. ``_ @@ -2340,9 +2342,9 @@ async def index( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -2451,7 +2453,7 @@ async def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns basic information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ """ @@ -3131,7 +3133,8 @@ async def put_script( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a stored script or search template. + Create or update a script or search template. Creates or updates a stored script + or search template. 
``_ @@ -3311,9 +3314,9 @@ async def reindex( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to copy documents from one index to another, optionally filtering the - source documents by a query, changing the destination index settings, or fetching - the documents from a remote cluster. + Reindex documents. Copies documents from a source to a destination. The source + can be any existing index, alias, or data stream. The destination must differ + from the source. For example, you cannot reindex a data stream into itself. ``_ @@ -3528,7 +3531,7 @@ async def scripts_painless_execute( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a script and returns a result. + Run a script. Runs a script and returns a result. ``_ @@ -4198,8 +4201,7 @@ async def search_mvt( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> BinaryApiResponse: """ - Searches a vector tile for geospatial values. Returns results as a binary Mapbox - vector tile. + Search a vector tile. Searches a vector tile for geospatial values. ``_ @@ -4687,8 +4689,8 @@ async def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information and statistics about terms in the fields of a particular - document. + Get term vector information. Returns information and statistics about terms in + the fields of a particular document. ``_ @@ -4830,7 +4832,8 @@ async def update( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates a document with a script or partial document. + Update a document. Updates a document by running a script or passing a partial + document. ``_ @@ -4995,9 +4998,9 @@ async def update_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates documents that match the specified query. 
If no query is specified, performs - an update on every document in the data stream or index without modifying the - source, which is useful for picking up mapping changes. + Update documents. Updates documents that match the specified query. If no query + is specified, performs an update on every document in the data stream or index + without modifying the source, which is useful for picking up mapping changes. ``_ diff --git a/elasticsearch/_async/client/cat.py b/elasticsearch/_async/client/cat.py index 3035c93b7f..e0e738749f 100644 --- a/elasticsearch/_async/client/cat.py +++ b/elasticsearch/_async/client/cat.py @@ -53,11 +53,11 @@ async def aliases( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Retrieves the cluster’s index aliases, including filter and routing information. - The API does not return data stream aliases. IMPORTANT: cat APIs are only intended + Get aliases. Retrieves the cluster’s index aliases, including filter and routing + information. The API does not return data stream aliases. CAT APIs are only intended for human consumption using the command line or the Kibana console. They are - not intended for use by applications. For application consumption, use the aliases - API. + not intended for use by applications. For application consumption, use the /_alias + endpoints. ``_ @@ -231,11 +231,12 @@ async def component_templates( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns information about component templates in a cluster. Component templates - are building blocks for constructing index templates that specify index mappings, - settings, and aliases. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get component template API. + Get component templates. Returns information about component templates in a cluster. 
+ Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. CAT APIs are only intended for + human consumption using the command line or Kibana console. They are not intended + for use by applications. For application consumption, use the /_component_template + endpoints. ``_ @@ -316,12 +317,12 @@ async def count( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Provides quick access to a document count for a data stream, an index, or an - entire cluster. NOTE: The document count only includes live documents, not deleted - documents which have not yet been removed by the merge process. IMPORTANT: cat + Get a document count. Provides quick access to a document count for a data stream, + an index, or an entire cluster. The document count only includes live documents, + not deleted documents which have not yet been removed by the merge process. CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, - use the count API. + use /_count endpoints. ``_ @@ -585,7 +586,7 @@ async def help( v: t.Optional[bool] = None, ) -> TextApiResponse: """ - Returns help for the Cat APIs. + Get CAT help. Returns help for the CAT APIs. ``_ @@ -676,16 +677,16 @@ async def indices( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns high-level information about indices in a cluster, including backing - indices for data streams. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get index API. 
Use the cat indices API to - get the following information for each index in a cluster: shard count; document - count; deleted document count; primary store size; total store size of all shards, - including shard replicas. These metrics are retrieved directly from Lucene, which - Elasticsearch uses internally to power indexing and search. As a result, all - document counts include hidden nested documents. To get an accurate count of - Elasticsearch documents, use the cat count or count APIs. + Get index information. Returns high-level information about indices in a cluster, + including backing indices for data streams. Use this request to get the following + information for each index in a cluster: - shard count - document count - deleted + document count - primary store size - total store size of all shards, including + shard replicas These metrics are retrieved directly from Lucene, which Elasticsearch + uses internally to power indexing and search. As a result, all document counts + include hidden nested documents. To get an accurate count of Elasticsearch documents, + use the /_cat/count or _count endpoints. CAT APIs are only intended for human + consumption using the command line or Kibana console. They are not intended for + use by applications. For application consumption, use an index endpoint. ``_ @@ -894,10 +895,10 @@ async def ml_data_frame_analytics( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about data frame analytics jobs. - IMPORTANT: cat APIs are only intended for human consumption using the Kibana - console or command line. They are not intended for use by applications. For application - consumption, use the get data frame analytics jobs statistics API. + Get data frame analytics jobs. Returns configuration and usage information about + data frame analytics jobs. CAT APIs are only intended for human consumption using + the Kibana console or command line. 
They are not intended for use by applications. + For application consumption, use the /_ml/data_frame/analytics endpoints. ``_ @@ -1016,12 +1017,12 @@ async def ml_datafeeds( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about datafeeds. This API returns - a maximum of 10,000 datafeeds. If the Elasticsearch security features are enabled, - you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges - to use this API. IMPORTANT: cat APIs are only intended for human consumption + Get datafeeds. Returns configuration and usage information about datafeeds. This + API returns a maximum of 10,000 datafeeds. If the Elasticsearch security features + are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` + cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. - For application consumption, use the get datafeed statistics API. + For application consumption, use the /_ml/datafeeds endpoints. ``_ @@ -1146,13 +1147,13 @@ async def ml_jobs( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information for anomaly detection jobs. This - API returns a maximum of 10,000 jobs. If the Elasticsearch security features - are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` - cluster privileges to use this API. IMPORTANT: cat APIs are only intended for + Get anomaly detection jobs. Returns configuration and usage information for anomaly + detection jobs. This API returns a maximum of 10,000 jobs. If the Elasticsearch + security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, + or `manage` cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. 
They are not intended - for use by applications. For application consumption, use the get anomaly detection - job statistics API. + for use by applications. For application consumption, use the /_ml/anomaly_detectors + endpoints. ``_ @@ -1280,10 +1281,10 @@ async def ml_trained_models( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about inference trained models. IMPORTANT: - cat APIs are only intended for human consumption using the Kibana console or - command line. They are not intended for use by applications. For application - consumption, use the get trained models statistics API. + Get trained models. Returns configuration and usage information about inference + trained models. CAT APIs are only intended for human consumption using the Kibana + console or command line. They are not intended for use by applications. For application + consumption, use the /_ml/trained_models endpoints. ``_ @@ -2458,10 +2459,10 @@ async def transforms( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about transforms. IMPORTANT: cat - APIs are only intended for human consumption using the Kibana console or command - line. They are not intended for use by applications. For application consumption, - use the get transform statistics API. + Get transforms. Returns configuration and usage information about transforms. + CAT APIs are only intended for human consumption using the Kibana console or + command line. They are not intended for use by applications. For application + consumption, use the /_transform endpoints. 
``_ diff --git a/elasticsearch/_async/client/cluster.py b/elasticsearch/_async/client/cluster.py index c5de06ab82..838fc89d8b 100644 --- a/elasticsearch/_async/client/cluster.py +++ b/elasticsearch/_async/client/cluster.py @@ -115,8 +115,9 @@ async def delete_component_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes component templates. Component templates are building blocks for constructing - index templates that specify index mappings, settings, and aliases. + Delete component templates. Deletes component templates. Component templates + are building blocks for constructing index templates that specify index mappings, + settings, and aliases. ``_ @@ -215,7 +216,8 @@ async def exists_component_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular component template exist + Check component templates. Returns information about whether a particular component + template exists. ``_ @@ -272,7 +274,7 @@ async def get_component_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about component templates. + Get component templates. Retrieves information about component templates. ``_ @@ -536,7 +538,7 @@ async def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns different information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ @@ -699,17 +701,18 @@ async def put_component_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a component template. Component templates are building blocks - for constructing index templates that specify index mappings, settings, and aliases. - An index template can be composed of multiple component templates. To use a component - template, specify it in an index template’s `composed_of` list. 
Component templates - are only applied to new data streams and indices as part of a matching index - template. Settings and mappings specified directly in the index template or the - create index request override any settings or mappings specified in a component - template. Component templates are only used during index creation. For data streams, - this includes data stream creation and the creation of a stream’s backing indices. - Changes to component templates do not affect existing indices, including a stream’s - backing indices. You can use C-style `/* *\\/` block comments in component templates. + Create or update a component template. Creates or updates a component template. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. An index template can be composed + of multiple component templates. To use a component template, specify it in an + index template’s `composed_of` list. Component templates are only applied to + new data streams and indices as part of a matching index template. Settings and + mappings specified directly in the index template or the create index request + override any settings or mappings specified in a component template. Component + templates are only used during index creation. For data streams, this includes + data stream creation and the creation of a stream’s backing indices. Changes + to component templates do not affect existing indices, including a stream’s backing + indices. You can use C-style `/* *\\/` block comments in component templates. You can include comments anywhere in the request body except before the opening curly bracket. 
diff --git a/elasticsearch/_async/client/enrich.py b/elasticsearch/_async/client/enrich.py index 63c7e8ed9a..6663826b97 100644 --- a/elasticsearch/_async/client/enrich.py +++ b/elasticsearch/_async/client/enrich.py @@ -36,7 +36,7 @@ async def delete_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing enrich policy and its enrich index. + Delete an enrich policy. Deletes an existing enrich policy and its enrich index. ``_ @@ -121,7 +121,7 @@ async def get_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about an enrich policy. + Get an enrich policy. Returns information about an enrich policy. ``_ @@ -171,7 +171,7 @@ async def put_policy( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an enrich policy. + Create an enrich policy. Creates an enrich policy. ``_ @@ -224,8 +224,8 @@ async def stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns enrich coordinator statistics and information about enrich policies that - are currently executing. + Get enrich stats. Returns enrich coordinator statistics and information about + enrich policies that are currently executing. 
``_ """ diff --git a/elasticsearch/_async/client/esql.py b/elasticsearch/_async/client/esql.py index 99739035e0..d0849643e7 100644 --- a/elasticsearch/_async/client/esql.py +++ b/elasticsearch/_async/client/esql.py @@ -26,7 +26,15 @@ class EsqlClient(NamespacedClient): @_rewrite_parameters( - body_fields=("query", "columnar", "filter", "locale", "params"), + body_fields=( + "query", + "columnar", + "filter", + "locale", + "params", + "profile", + "tables", + ), ignore_deprecated_options={"params"}, ) async def query( @@ -35,14 +43,21 @@ async def query( query: t.Optional[str] = None, columnar: t.Optional[bool] = None, delimiter: t.Optional[str] = None, + drop_null_columns: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, locale: t.Optional[str] = None, - params: t.Optional[t.Sequence[t.Union[None, bool, float, int, str]]] = None, + params: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, pretty: t.Optional[bool] = None, + profile: t.Optional[bool] = None, + tables: t.Optional[ + t.Mapping[str, t.Mapping[str, t.Mapping[str, t.Any]]] + ] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -58,6 +73,10 @@ async def query( row represents all the values of a certain column in the results. :param delimiter: The character to use between values within a CSV row. Only valid for the CSV format. + :param drop_null_columns: Should columns that are entirely `null` be removed + from the `columns` and `values` portion of the results? Defaults to `false`. + If `true` then the response will include an extra section under the name + `all_columns` which has the name of all columns. :param filter: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. 
:param format: A short version of the Accept header, e.g. json, yaml. @@ -65,6 +84,12 @@ async def query( :param params: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. + :param profile: If provided and `true` the response will include an extra `profile` + object with information on how the query was executed. This information is + for human debugging and its format can change at any time but it can give + some insight into the performance of each part of the query. + :param tables: Tables to use with the LOOKUP operation. The top level key is + the table name and the next level key is the column name. """ if query is None and body is None: raise ValueError("Empty value passed for parameter 'query'") @@ -74,6 +99,8 @@ async def query( __body: t.Dict[str, t.Any] = body if body is not None else {} if delimiter is not None: __query["delimiter"] = delimiter + if drop_null_columns is not None: + __query["drop_null_columns"] = drop_null_columns if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -95,6 +122,10 @@ async def query( __body["locale"] = locale if params is not None: __body["params"] = params + if profile is not None: + __body["profile"] = profile + if tables is not None: + __body["tables"] = tables __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "POST", diff --git a/elasticsearch/_async/client/indices.py b/elasticsearch/_async/client/indices.py index 392b054abd..97840941dc 100644 --- a/elasticsearch/_async/client/indices.py +++ b/elasticsearch/_async/client/indices.py @@ -51,7 +51,8 @@ async def add_block( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a block to an index. + Add an index block. 
Limits the operations allowed on an index by blocking specific + operation types. ``_ @@ -500,7 +501,7 @@ async def create( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index. + Create an index. Creates a new index. ``_ @@ -571,8 +572,8 @@ async def create_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a data stream. You must have a matching index template with data stream - enabled. + Create a data stream. Creates a data stream. You must have a matching index template + with data stream enabled. ``_ @@ -624,7 +625,7 @@ async def data_streams_stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves statistics for one or more data streams. + Get data stream stats. Retrieves statistics for one or more data streams. ``_ @@ -687,7 +688,7 @@ async def delete( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more indices. + Delete indices. Deletes one or more indices. ``_ @@ -759,7 +760,7 @@ async def delete_alias( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a data stream or index from an alias. + Delete an alias. Removes a data stream or index from an alias. ``_ @@ -825,8 +826,8 @@ async def delete_data_lifecycle( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes the data lifecycle from a data stream rendering it not managed by the - data stream lifecycle + Delete data stream lifecycles. Removes the data stream lifecycle from a data + stream, rendering it not managed by the data stream lifecycle. ``_ @@ -885,7 +886,7 @@ async def delete_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more data streams and their backing indices. + Delete data streams. 
Deletes one or more data streams and their backing indices. ``_ @@ -934,9 +935,10 @@ async def delete_index_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - The provided may contain multiple template names separated by - a comma. If multiple template names are specified then there is no wildcard support - and the provided names should match completely with existing templates. + Delete an index template. The provided may contain multiple + template names separated by a comma. If multiple template names are specified + then there is no wildcard support and the provided names should match completely + with existing templates. ``_ @@ -1195,7 +1197,8 @@ async def exists( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if a data stream, index, or alias exists. + Check indices. Checks if one or more indices, index aliases, or data streams + exist. ``_ @@ -1273,7 +1276,7 @@ async def exists_alias( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if an alias exists. + Check aliases. Checks if one or more data stream or index aliases exist. ``_ @@ -1396,7 +1399,8 @@ async def exists_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular index template exists. + Check existence of index templates. Returns information about whether a particular + index template exists. ``_ @@ -1450,8 +1454,10 @@ async def explain_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about the index's current data stream lifecycle, such as - any potential encountered error, time since creation etc. + Get the status for a data stream lifecycle. 
Retrieves information about an index + or data stream’s current data stream lifecycle status, such as time since index + creation, time since rollover, the lifecycle configuration managing the index, + or any errors encountered during lifecycle execution. ``_ @@ -1784,8 +1790,8 @@ async def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more indices. For data streams, the API returns - information about the stream’s backing indices. + Get index information. Returns information about one or more indices. For data + streams, the API returns information about the stream’s backing indices. ``_ @@ -1874,7 +1880,7 @@ async def get_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more aliases. + Get aliases. Retrieves information for one or more data stream or index aliases. ``_ @@ -1955,7 +1961,8 @@ async def get_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the data stream lifecycle configuration of one or more data streams. + Get data stream lifecycles. Retrieves the data stream lifecycle configuration + of one or more data streams. ``_ @@ -2014,7 +2021,7 @@ async def get_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more data streams. + Get data streams. Retrieves information about one or more data streams. ``_ @@ -2080,8 +2087,8 @@ async def get_field_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more fields. For data streams, the API - retrieves field mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more fields. + For data streams, the API retrieves field mappings for the stream’s backing indices. 
``_ @@ -2160,7 +2167,7 @@ async def get_index_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more index templates. + Get index templates. Returns information about one or more index templates. ``_ @@ -2235,8 +2242,8 @@ async def get_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more indices. For data streams, the - API retrieves mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more indices. + For data streams, the API retrieves mappings for the stream’s backing indices. ``_ @@ -2322,8 +2329,8 @@ async def get_settings( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns setting information for one or more indices. For data streams, returns - setting information for the stream’s backing indices. + Get index settings. Returns setting information for one or more indices. For + data streams, returns setting information for the stream’s backing indices. ``_ @@ -2412,7 +2419,7 @@ async def get_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more index templates. + Get index templates. Retrieves information about one or more index templates. ``_ @@ -2469,14 +2476,14 @@ async def migrate_to_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Converts an index alias to a data stream. You must have a matching index template - that is data stream enabled. The alias must meet the following criteria: The - alias must have a write index; All indices for the alias must have a `@timestamp` - field mapping of a `date` or `date_nanos` field type; The alias must not have - any filters; The alias must not use custom routing. If successful, the request - removes the alias and creates a data stream with the same name. 
The indices for - the alias become hidden backing indices for the stream. The write index for the - alias becomes the write index for the stream. + Convert an index alias to a data stream. Converts an index alias to a data stream. + You must have a matching index template that is data stream enabled. The alias + must meet the following criteria: The alias must have a write index; All indices + for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` + field type; The alias must not have any filters; The alias must not use custom + routing. If successful, the request removes the alias and creates a data stream + with the same name. The indices for the alias become hidden backing indices for + the stream. The write index for the alias becomes the write index for the stream. ``_ @@ -2519,7 +2526,8 @@ async def modify_data_stream( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs one or more data stream modification actions in a single atomic operation. + Update data streams. Performs one or more data stream modification actions in + a single atomic operation. ``_ @@ -2715,7 +2723,7 @@ async def put_alias( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -2821,7 +2829,8 @@ async def put_data_lifecycle( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Update the data lifecycle of the specified data streams. + Update data stream lifecycles. Update the data stream lifecycle of the specified + data streams. ``_ @@ -2924,7 +2933,7 @@ async def put_index_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates an index template. Index templates define settings, mappings, + Create or update an index template. 
Index templates define settings, mappings, and aliases that can be applied automatically to new indices. ``_ @@ -3086,9 +3095,9 @@ async def put_mapping( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds new fields to an existing data stream or index. You can also use this API - to change the search settings of existing fields. For data streams, these changes - are applied to all backing indices by default. + Update field mappings. Adds new fields to an existing data stream or index. You + can also use this API to change the search settings of existing fields. For data + streams, these changes are applied to all backing indices by default. ``_ @@ -3219,8 +3228,8 @@ async def put_settings( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Changes a dynamic index setting in real time. For data streams, index setting - changes are applied to all backing indices by default. + Update index settings. Changes dynamic index settings in real time. For data + streams, index setting changes are applied to all backing indices by default. ``_ @@ -3326,7 +3335,7 @@ async def put_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates an index template. Index templates define settings, mappings, + Create or update an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. ``_ @@ -3471,9 +3480,9 @@ async def refresh( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - A refresh makes recent operations performed on one or more indices available - for search. For data streams, the API runs the refresh operation on the stream’s - backing indices. + Refresh an index. A refresh makes recent operations performed on one or more + indices available for search. For data streams, the API runs the refresh operation + on the stream’s backing indices. 
``_ @@ -3745,7 +3754,7 @@ async def rollover( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index for a data stream or index alias. + Roll over to a new index. Creates a new index for a data stream or index alias. ``_ @@ -4081,7 +4090,8 @@ async def simulate_index_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - + Simulate an index. Returns the index configuration that would be applied to the + specified index from an existing index template. ``_ @@ -4160,7 +4170,8 @@ async def simulate_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the index configuration that would be applied by a particular index template. + Simulate an index template. Returns the index configuration that would be applied + by a particular index template. ``_ @@ -4567,7 +4578,7 @@ async def update_aliases( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -4642,7 +4653,7 @@ async def validate_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Validates a potentially expensive query without executing it. + Validate a query. Validates a query without running it. ``_ diff --git a/elasticsearch/_async/client/license.py b/elasticsearch/_async/client/license.py index 6a2265446b..a43d7064d7 100644 --- a/elasticsearch/_async/client/license.py +++ b/elasticsearch/_async/client/license.py @@ -72,9 +72,9 @@ async def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - This API returns information about the type of license, when it was issued, and - when it expires, for example. For more information about the different types - of licenses, see https://www.elastic.co/subscriptions. + Get license information. 
Returns information about your Elastic license, including + its type, its status, when it was issued, and when it expires. For more information + about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). ``_ diff --git a/elasticsearch/_async/client/ml.py b/elasticsearch/_async/client/ml.py index 42cf6df006..258d060d2f 100644 --- a/elasticsearch/_async/client/ml.py +++ b/elasticsearch/_async/client/ml.py @@ -88,7 +88,7 @@ async def close_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Close anomaly detection jobs A job can be opened and closed multiple times throughout + Close anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating @@ -579,7 +579,7 @@ async def delete_job( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an anomaly detection job. All job configuration, model state and results + Delete an anomaly detection job. All job configuration, model state and results are deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. If you delete a job that has a datafeed, the request first tries to delete the datafeed. This behavior is equivalent to calling the @@ -2739,12 +2739,12 @@ async def open_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Opens one or more anomaly detection jobs. An anomaly detection job must be opened - in order for it to be ready to receive and analyze data. It can be opened and - closed multiple times throughout its lifecycle. When you open a new job, it starts - with an empty model. 
When you open an existing job, the most recent model state - is automatically loaded. The job is ready to resume its analysis from where it - left off, once new data is received. + Open anomaly detection jobs. An anomaly detection job must be opened in order + for it to be ready to receive and analyze data. It can be opened and closed multiple + times throughout its lifecycle. When you open a new job, it starts with an empty + model. When you open an existing job, the most recent model state is automatically + loaded. The job is ready to resume its analysis from where it left off, once + new data is received. ``_ @@ -3612,8 +3612,8 @@ async def put_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Instantiates an anomaly detection job. If you include a `datafeed_config`, you - must have read index privileges on the source index. + Create an anomaly detection job. If you include a `datafeed_config`, you must + have read index privileges on the source index. 
``_ diff --git a/elasticsearch/_async/client/query_rules.py b/elasticsearch/_async/client/query_rules.py index 4a64f8b2df..17ee1dbcd5 100644 --- a/elasticsearch/_async/client/query_rules.py +++ b/elasticsearch/_async/client/query_rules.py @@ -250,7 +250,7 @@ async def list_rulesets( ) @_rewrite_parameters( - body_fields=("actions", "criteria", "type"), + body_fields=("actions", "criteria", "type", "priority"), ) async def put_rule( self, @@ -258,12 +258,15 @@ async def put_rule( ruleset_id: str, rule_id: str, actions: t.Optional[t.Mapping[str, t.Any]] = None, - criteria: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + criteria: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, type: t.Optional[t.Union["t.Literal['pinned']", str]] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -278,6 +281,7 @@ async def put_rule( :param actions: :param criteria: :param type: + :param priority: """ if ruleset_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'ruleset_id'") @@ -311,6 +315,8 @@ async def put_rule( __body["criteria"] = criteria if type is not None: __body["type"] = type + if priority is not None: + __body["priority"] = priority __headers = {"accept": "application/json", "content-type": "application/json"} return await self.perform_request( # type: ignore[return-value] "PUT", @@ -329,7 +335,9 @@ async def put_ruleset( self, *, ruleset_id: str, - rules: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + rules: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, diff --git 
a/elasticsearch/_async/client/security.py b/elasticsearch/_async/client/security.py index bb032362cb..7a5a210ab4 100644 --- a/elasticsearch/_async/client/security.py +++ b/elasticsearch/_async/client/security.py @@ -97,12 +97,12 @@ async def authenticate( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Enables you to submit a request with a basic auth header to authenticate a user - and retrieve information about the authenticated user. A successful call returns - a JSON structure that shows user information such as their username, the roles - that are assigned to the user, any assigned metadata, and information about the - realms that authenticated and authorized the user. If the user cannot be authenticated, - this API returns a 401 status code. + Authenticate a user. Authenticates a user and returns information about the authenticated + user. Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). + A successful call returns a JSON structure that shows user information such as + their username, the roles that are assigned to the user, any assigned metadata, + and information about the realms that authenticated and authorized the user. + If the user cannot be authenticated, this API returns a 401 status code. 
``_ """ @@ -127,6 +127,122 @@ async def authenticate( path_parts=__path_parts, ) + @_rewrite_parameters( + body_fields=("names",), + ) + async def bulk_delete_role( + self, + *, + names: t.Optional[t.Sequence[str]] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + refresh: t.Optional[ + t.Union["t.Literal['false', 'true', 'wait_for']", bool, str] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + The role management APIs are generally the preferred way to manage roles, rather + than using file-based role management. The bulk delete roles API cannot delete + roles that are defined in roles files. + + ``_ + + :param names: An array of role names to delete + :param refresh: If `true` (the default) then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh to + make this operation visible to search, if `false` then do nothing with refreshes. 
+ """ + if names is None and body is None: + raise ValueError("Empty value passed for parameter 'names'") + __path_parts: t.Dict[str, str] = {} + __path = "/_security/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if refresh is not None: + __query["refresh"] = refresh + if not __body: + if names is not None: + __body["names"] = names + __headers = {"accept": "application/json", "content-type": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "DELETE", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.bulk_delete_role", + path_parts=__path_parts, + ) + + @_rewrite_parameters( + body_fields=("roles",), + ) + async def bulk_put_role( + self, + *, + roles: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + refresh: t.Optional[ + t.Union["t.Literal['false', 'true', 'wait_for']", bool, str] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + The role management APIs are generally the preferred way to manage roles, rather + than using file-based role management. The bulk create or update roles API cannot + update roles that are defined in roles files. 
+ + ``_ + + :param roles: A dictionary of role name to RoleDescriptor objects to add or update + :param refresh: If `true` (the default) then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh to + make this operation visible to search, if `false` then do nothing with refreshes. + """ + if roles is None and body is None: + raise ValueError("Empty value passed for parameter 'roles'") + __path_parts: t.Dict[str, str] = {} + __path = "/_security/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if refresh is not None: + __query["refresh"] = refresh + if not __body: + if roles is not None: + __body["roles"] = roles + __headers = {"accept": "application/json", "content-type": "application/json"} + return await self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.bulk_put_role", + path_parts=__path_parts, + ) + @_rewrite_parameters( body_fields=("password", "password_hash"), ) @@ -436,11 +552,11 @@ async def create_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an API key for access without requiring basic authentication. A successful - request returns a JSON structure that contains the API key, its unique id, and - its name. If applicable, it also returns expiration information for the API key - in milliseconds. NOTE: By default, API keys never expire. You can specify expiration - information when you create the API keys. + Create an API key. Creates an API key for access without requiring basic authentication. 
+ A successful request returns a JSON structure that contains the API key, its + unique id, and its name. If applicable, it also returns expiration information + for the API key in milliseconds. NOTE: By default, API keys never expire. You + can specify expiration information when you create the API keys. ``_ @@ -1109,10 +1225,11 @@ async def get_api_key( with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more API keys. NOTE: If you have only the `manage_own_api_key` - privilege, this API returns only the API keys that you own. If you have `read_security`, - `manage_api_key` or greater privileges (including `manage_security`), this API - returns all API keys regardless of ownership. + Get API key information. Retrieves information for one or more API keys. NOTE: + If you have only the `manage_own_api_key` privilege, this API returns only the + API keys that you own. If you have `read_security`, `manage_api_key` or greater + privileges (including `manage_security`), this API returns all API keys regardless + of ownership. 
``_ @@ -1789,7 +1906,7 @@ async def has_privileges( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 
'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] @@ -1802,7 +1919,8 @@ async def has_privileges( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Determines whether the specified user has a specified list of privileges. + Check user privileges. Determines whether the specified user has a specified + list of privileges. ``_ @@ -1921,13 +2039,13 @@ async def invalidate_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Invalidates one or more API keys. The `manage_api_key` privilege allows deleting - any API keys. The `manage_own_api_key` only allows deleting API keys that are - owned by the user. In addition, with the `manage_own_api_key` privilege, an invalidation - request must be issued in one of the three formats: - Set the parameter `owner=true`. - - Or, set both `username` and `realm_name` to match the user’s identity. - Or, - if the request is issued by an API key, i.e. an API key invalidates itself, specify - its ID in the `ids` field. + Invalidate API keys. Invalidates one or more API keys. The `manage_api_key` privilege + allows deleting any API keys. The `manage_own_api_key` only allows deleting API + keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, + an invalidation request must be issued in one of the three formats: - Set the + parameter `owner=true`. - Or, set both `username` and `realm_name` to match the + user’s identity. 
- Or, if the request is issued by an API key, i.e. an API key + invalidates itself, specify its ID in the `ids` field. ``_ @@ -2102,6 +2220,7 @@ async def put_privileges( body_fields=( "applications", "cluster", + "description", "global_", "indices", "metadata", @@ -2118,11 +2237,12 @@ async def put_role( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 
'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] ] = None, + description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, global_: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2148,6 +2268,7 @@ async def put_role( :param applications: A list of application privilege entries. :param cluster: A list of cluster privileges. These privileges define the cluster-level actions for users with this role. + :param description: Optional description of the role descriptor :param global_: An object defining global privileges. A global privilege is a form of cluster privilege that is request-aware. Support for global privileges is currently limited to the management of application privileges. @@ -2189,6 +2310,8 @@ async def put_role( __body["applications"] = applications if cluster is not None: __body["cluster"] = cluster + if description is not None: + __body["description"] = description if global_ is not None: __body["global"] = global_ if indices is not None: @@ -2425,8 +2548,8 @@ async def query_api_keys( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for API keys in a paginated manner. 
You can optionally - filter the results with a query. + Query API keys. Retrieves a paginated list of API keys and their information. + You can optionally filter the results with a query. ``_ @@ -2526,6 +2649,181 @@ async def query_api_keys( path_parts=__path_parts, ) + @_rewrite_parameters( + body_fields=("from_", "query", "search_after", "size", "sort"), + parameter_aliases={"from": "from_"}, + ) + async def query_role( + self, + *, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + from_: t.Optional[int] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[t.Mapping[str, t.Any]] = None, + search_after: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, + size: t.Optional[int] = None, + sort: t.Optional[ + t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], + t.Union[str, t.Mapping[str, t.Any]], + ] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Retrieves roles in a paginated manner. You can optionally filter the results + with a query. + + ``_ + + :param from_: Starting document offset. By default, you cannot page through more + than 10,000 hits using the from and size parameters. To page through more + hits, use the `search_after` parameter. + :param query: A query to filter which roles to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. + You can query the following information associated with roles: `name`, `description`, + `metadata`, `applications.application`, `applications.privileges`, `applications.resources`. + :param search_after: Search after definition + :param size: The number of hits to return. 
By default, you cannot page through + more than 10,000 hits using the `from` and `size` parameters. To page through + more hits, use the `search_after` parameter. + :param sort: All public fields of a role are eligible for sorting. In addition, + sort can also be applied to the `_doc` field to sort by index order. + """ + __path_parts: t.Dict[str, str] = {} + __path = "/_security/_query/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if not __body: + if from_ is not None: + __body["from"] = from_ + if query is not None: + __body["query"] = query + if search_after is not None: + __body["search_after"] = search_after + if size is not None: + __body["size"] = size + if sort is not None: + __body["sort"] = sort + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return await self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.query_role", + path_parts=__path_parts, + ) + + @_rewrite_parameters( + body_fields=("from_", "query", "search_after", "size", "sort"), + parameter_aliases={"from": "from_"}, + ) + async def query_user( + self, + *, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + from_: t.Optional[int] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[t.Mapping[str, t.Any]] = None, + search_after: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, + size: t.Optional[int] = None, + sort: t.Optional[ + t.Union[ + 
t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], + t.Union[str, t.Mapping[str, t.Any]], + ] + ] = None, + with_profile_uid: t.Optional[bool] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Retrieves information for Users in a paginated manner. You can optionally filter + the results with a query. + + ``_ + + :param from_: Starting document offset. By default, you cannot page through more + than 10,000 hits using the from and size parameters. To page through more + hits, use the `search_after` parameter. + :param query: A query to filter which users to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. + You can query the following information associated with user: `username`, + `roles`, `enabled` + :param search_after: Search after definition + :param size: The number of hits to return. By default, you cannot page through + more than 10,000 hits using the `from` and `size` parameters. To page through + more hits, use the `search_after` parameter. + :param sort: Fields eligible for sorting are: username, roles, enabled In addition, + sort can also be applied to the `_doc` field to sort by index order. + :param with_profile_uid: If true will return the User Profile ID for the users + in the query result, if any. 
+ """ + __path_parts: t.Dict[str, str] = {} + __path = "/_security/_query/user" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid + if not __body: + if from_ is not None: + __body["from"] = from_ + if query is not None: + __body["query"] = query + if search_after is not None: + __body["search_after"] = search_after + if size is not None: + __body["size"] = size + if sort is not None: + __body["sort"] = sort + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return await self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.query_user", + path_parts=__path_parts, + ) + @_rewrite_parameters( body_fields=("content", "ids", "realm"), ) @@ -2965,22 +3263,22 @@ async def update_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates attributes of an existing API key. Users can only update API keys that - they created or that were granted to them. Use this API to update API keys created - by the create API Key or grant API Key APIs. If you need to apply the same update - to many API keys, you can use bulk update API Keys to reduce overhead. It’s not - possible to update expired API keys, or API keys that have been invalidated by - invalidate API Key. This API supports updates to an API key’s access scope and - metadata. 
The access scope of an API key is derived from the `role_descriptors` - you specify in the request, and a snapshot of the owner user’s permissions at - the time of the request. The snapshot of the owner’s permissions is updated automatically - on every call. If you don’t specify `role_descriptors` in the request, a call - to this API might still change the API key’s access scope. This change can occur - if the owner user’s permissions have changed since the API key was created or - last modified. To update another user’s API key, use the `run_as` feature to - submit a request on behalf of another user. IMPORTANT: It’s not possible to use - an API key as the authentication credential for this API. To update an API key, - the owner user’s credentials are required. + Update an API key. Updates attributes of an existing API key. Users can only + update API keys that they created or that were granted to them. Use this API + to update API keys created by the create API Key or grant API Key APIs. If you + need to apply the same update to many API keys, you can use bulk update API Keys + to reduce overhead. It’s not possible to update expired API keys, or API keys + that have been invalidated by invalidate API Key. This API supports updates to + an API key’s access scope and metadata. The access scope of an API key is derived + from the `role_descriptors` you specify in the request, and a snapshot of the + owner user’s permissions at the time of the request. The snapshot of the owner’s + permissions is updated automatically on every call. If you don’t specify `role_descriptors` + in the request, a call to this API might still change the API key’s access scope. + This change can occur if the owner user’s permissions have changed since the + API key was created or last modified. To update another user’s API key, use the + `run_as` feature to submit a request on behalf of another user. 
IMPORTANT: It’s + not possible to use an API key as the authentication credential for this API. + To update an API key, the owner user’s credentials are required. ``_ diff --git a/elasticsearch/_async/client/tasks.py b/elasticsearch/_async/client/tasks.py index cf5ebd978d..ca693d58af 100644 --- a/elasticsearch/_async/client/tasks.py +++ b/elasticsearch/_async/client/tasks.py @@ -99,7 +99,8 @@ async def get( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about a task. + Get task information. Returns information about the tasks currently executing + in the cluster. ``_ diff --git a/elasticsearch/_async/client/transform.py b/elasticsearch/_async/client/transform.py index 90807b9cfa..c5970fc667 100644 --- a/elasticsearch/_async/client/transform.py +++ b/elasticsearch/_async/client/transform.py @@ -39,7 +39,7 @@ async def delete_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a transform. + Delete a transform. Deletes a transform. ``_ @@ -99,7 +99,7 @@ async def get_transform( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves configuration information for transforms. + Get transforms. Retrieves configuration information for transforms. ``_ @@ -168,7 +168,7 @@ async def get_transform_stats( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves usage information for transforms. + Get transform stats. Retrieves usage information for transforms. ``_ @@ -249,10 +249,12 @@ async def preview_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Previews a transform. It returns a maximum of 100 results. The calculations are - based on all the current data in the source index. It also generates a list of - mappings and settings for the destination index. 
These values are determined - based on the field types of the source index and the transform aggregations. + Preview a transform. Generates a preview of the results that you will get when + you create a transform with the same configuration. It returns a maximum of 100 + results. The calculations are based on all the current data in the source index. + It also generates a list of mappings and settings for the destination index. + These values are determined based on the field types of the source index and + the transform aggregations. ``_ @@ -369,26 +371,27 @@ async def put_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a transform. A transform copies data from source indices, transforms - it, and persists it into an entity-centric destination index. You can also think - of the destination index as a two-dimensional tabular data structure (known as - a data frame). The ID for each document in the data frame is generated from a - hash of the entity, so there is a unique row per entity. You must choose either - the latest or pivot method for your transform; you cannot use both in a single - transform. If you choose to use the pivot method for your transform, the entities - are defined by the set of `group_by` fields in the pivot object. If you choose - to use the latest method, the entities are defined by the `unique_key` field - values in the latest object. You must have `create_index`, `index`, and `read` - privileges on the destination index and `read` and `view_index_metadata` privileges - on the source indices. When Elasticsearch security features are enabled, the - transform remembers which roles the user that created it had at the time of creation - and uses those same roles. If those roles do not have the required privileges - on the source and destination indices, the transform fails when it attempts unauthorized - operations. NOTE: You must use Kibana or this API to create a transform. 
Do not - add a transform directly into any `.transform-internal*` indices using the Elasticsearch - index API. If Elasticsearch security features are enabled, do not give users - any privileges on `.transform-internal*` indices. If you used transforms prior - to 7.5, also do not give users any privileges on `.data-frame-internal*` indices. + Create a transform. Creates a transform. A transform copies data from source + indices, transforms it, and persists it into an entity-centric destination index. + You can also think of the destination index as a two-dimensional tabular data + structure (known as a data frame). The ID for each document in the data frame + is generated from a hash of the entity, so there is a unique row per entity. + You must choose either the latest or pivot method for your transform; you cannot + use both in a single transform. If you choose to use the pivot method for your + transform, the entities are defined by the set of `group_by` fields in the pivot + object. If you choose to use the latest method, the entities are defined by the + `unique_key` field values in the latest object. You must have `create_index`, + `index`, and `read` privileges on the destination index and `read` and `view_index_metadata` + privileges on the source indices. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. NOTE: You must use Kibana or this API to create + a transform. Do not add a transform directly into any `.transform-internal*` + indices using the Elasticsearch index API. If Elasticsearch security features + are enabled, do not give users any privileges on `.transform-internal*` indices. 
+ If you used transforms prior to 7.5, also do not give users any privileges on + `.data-frame-internal*` indices. ``_ @@ -488,9 +491,9 @@ async def reset_transform( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Resets a transform. Before you can reset it, you must stop it; alternatively, - use the `force` query parameter. If the destination index was created by the - transform, it is deleted. + Reset a transform. Resets a transform. Before you can reset it, you must stop + it; alternatively, use the `force` query parameter. If the destination index + was created by the transform, it is deleted. ``_ @@ -538,10 +541,11 @@ async def schedule_now_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Schedules now a transform. If you _schedule_now a transform, it will process - the new data instantly, without waiting for the configured frequency interval. - After _schedule_now API is called, the transform will be processed again at now - + frequency unless _schedule_now API is called again in the meantime. + Schedule a transform to start now. Instantly runs a transform to process data. + If you _schedule_now a transform, it will process the new data instantly, without + waiting for the configured frequency interval. After _schedule_now API is called, + the transform will be processed again at now + frequency unless _schedule_now + API is called again in the meantime. ``_ @@ -588,23 +592,24 @@ async def start_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Starts a transform. When you start a transform, it creates the destination index - if it does not already exist. The `number_of_shards` is set to `1` and the `auto_expand_replicas` - is set to `0-1`. If it is a pivot transform, it deduces the mapping definitions - for the destination index from the source indices and the transform aggregations. 
- If fields in the destination index are derived from scripts (as in the case of - `scripted_metric` or `bucket_script` aggregations), the transform uses dynamic - mappings unless an index template exists. If it is a latest transform, it does - not deduce mapping definitions; it uses dynamic mappings. To use explicit mappings, - create the destination index before you start the transform. Alternatively, you - can create an index template, though it does not affect the deduced mappings - in a pivot transform. When the transform starts, a series of validations occur - to ensure its success. If you deferred validation when you created the transform, - they occur when you start the transform—​with the exception of privilege checks. - When Elasticsearch security features are enabled, the transform remembers which - roles the user that created it had at the time of creation and uses those same - roles. If those roles do not have the required privileges on the source and destination - indices, the transform fails when it attempts unauthorized operations. + Start a transform. Starts a transform. When you start a transform, it creates + the destination index if it does not already exist. The `number_of_shards` is + set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, + it deduces the mapping definitions for the destination index from the source + indices and the transform aggregations. If fields in the destination index are + derived from scripts (as in the case of `scripted_metric` or `bucket_script` + aggregations), the transform uses dynamic mappings unless an index template exists. + If it is a latest transform, it does not deduce mapping definitions; it uses + dynamic mappings. To use explicit mappings, create the destination index before + you start the transform. Alternatively, you can create an index template, though + it does not affect the deduced mappings in a pivot transform. 
When the transform + starts, a series of validations occur to ensure its success. If you deferred + validation when you created the transform, they occur when you start the transform—​with + the exception of privilege checks. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. ``_ @@ -658,7 +663,7 @@ async def stop_transform( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Stops one or more transforms. + Stop transforms. Stops one or more transforms. ``_ @@ -751,13 +756,14 @@ async def update_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates certain properties of a transform. All updated properties except `description` - do not take effect until after the transform starts the next checkpoint, thus - there is data consistency in each checkpoint. To use this API, you must have - `read` and `view_index_metadata` privileges for the source indices. You must - also have `index` and `read` privileges for the destination index. When Elasticsearch - security features are enabled, the transform remembers which roles the user who - updated it had at the time of update and runs with those privileges. + Update a transform. Updates certain properties of a transform. All updated properties + except `description` do not take effect until after the transform starts the + next checkpoint, thus there is data consistency in each checkpoint. To use this + API, you must have `read` and `view_index_metadata` privileges for the source + indices. You must also have `index` and `read` privileges for the destination + index. 
When Elasticsearch security features are enabled, the transform remembers + which roles the user who updated it had at the time of update and runs with those + privileges. ``_ diff --git a/elasticsearch/_sync/client/__init__.py b/elasticsearch/_sync/client/__init__.py index 180f4bc659..8e41419962 100644 --- a/elasticsearch/_sync/client/__init__.py +++ b/elasticsearch/_sync/client/__init__.py @@ -636,8 +636,9 @@ def bulk( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs multiple indexing or delete operations in a single API call. This reduces - overhead and can greatly increase indexing speed. + Bulk index or delete documents. Performs multiple indexing or delete operations + in a single API call. This reduces overhead and can greatly increase indexing + speed. ``_ @@ -993,9 +994,9 @@ def create( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. ``_ @@ -1099,7 +1100,7 @@ def delete( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a JSON document from the specified index. + Delete a document. Removes a JSON document from the specified index. ``_ @@ -1223,7 +1224,7 @@ def delete_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes documents that match the specified query. + Delete documents. Deletes documents that match the specified query. ``_ @@ -1449,7 +1450,7 @@ def delete_script( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a stored script or search template. + Delete a script or search template. 
Deletes a stored script or search template. ``_ @@ -1517,7 +1518,7 @@ def exists( ] = None, ) -> HeadApiResponse: """ - Checks if a document in an index exists. + Check a document. Checks if a specified document exists. ``_ @@ -1618,7 +1619,7 @@ def exists_source( ] = None, ) -> HeadApiResponse: """ - Checks if a document's `_source` is stored. + Check for a document source. Checks if a document's `_source` is stored. ``_ @@ -1718,8 +1719,8 @@ def explain( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about why a specific document matches (or doesn’t match) - a query. + Explain a document match result. Returns information about why a specific document + matches, or doesn’t match, a query. ``_ @@ -1957,7 +1958,8 @@ def get( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns a document. + Get a document by its ID. Retrieves the document with the specified ID from an + index. ``_ @@ -2048,7 +2050,7 @@ def get_script( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves a stored script or search template. + Get a script or search template. Retrieves a stored script or search template. ``_ @@ -2180,7 +2182,7 @@ def get_source( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the source of a document. + Get a document's source. Returns the source of a document. ``_ @@ -2338,9 +2340,9 @@ def index( ] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a JSON document to the specified data stream or index and makes it searchable. - If the target is an index and the document already exists, the request updates - the document and increments its version. + Index a document. Adds a JSON document to the specified data stream or index + and makes it searchable. If the target is an index and the document already exists, + the request updates the document and increments its version. 
``_ @@ -2449,7 +2451,7 @@ def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns basic information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ """ @@ -3129,7 +3131,8 @@ def put_script( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a stored script or search template. + Create or update a script or search template. Creates or updates a stored script + or search template. ``_ @@ -3309,9 +3312,9 @@ def reindex( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Allows to copy documents from one index to another, optionally filtering the - source documents by a query, changing the destination index settings, or fetching - the documents from a remote cluster. + Reindex documents. Copies documents from a source to a destination. The source + can be any existing index, alias, or data stream. The destination must differ + from the source. For example, you cannot reindex a data stream into itself. ``_ @@ -3526,7 +3529,7 @@ def scripts_painless_execute( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Runs a script and returns a result. + Run a script. Runs a script and returns a result. ``_ @@ -4196,8 +4199,7 @@ def search_mvt( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> BinaryApiResponse: """ - Searches a vector tile for geospatial values. Returns results as a binary Mapbox - vector tile. + Search a vector tile. Searches a vector tile for geospatial values. ``_ @@ -4685,8 +4687,8 @@ def termvectors( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information and statistics about terms in the fields of a particular - document. + Get term vector information. Returns information and statistics about terms in + the fields of a particular document. 
``_ @@ -4828,7 +4830,8 @@ def update( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates a document with a script or partial document. + Update a document. Updates a document by running a script or passing a partial + document. ``_ @@ -4993,9 +4996,9 @@ def update_by_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates documents that match the specified query. If no query is specified, performs - an update on every document in the data stream or index without modifying the - source, which is useful for picking up mapping changes. + Update documents. Updates documents that match the specified query. If no query + is specified, performs an update on every document in the data stream or index + without modifying the source, which is useful for picking up mapping changes. ``_ diff --git a/elasticsearch/_sync/client/cat.py b/elasticsearch/_sync/client/cat.py index 39d64c8fc4..fbdde503bd 100644 --- a/elasticsearch/_sync/client/cat.py +++ b/elasticsearch/_sync/client/cat.py @@ -53,11 +53,11 @@ def aliases( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Retrieves the cluster’s index aliases, including filter and routing information. - The API does not return data stream aliases. IMPORTANT: cat APIs are only intended + Get aliases. Retrieves the cluster’s index aliases, including filter and routing + information. The API does not return data stream aliases. CAT APIs are only intended for human consumption using the command line or the Kibana console. They are - not intended for use by applications. For application consumption, use the aliases - API. + not intended for use by applications. For application consumption, use the /_alias + endpoints. ``_ @@ -231,11 +231,12 @@ def component_templates( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns information about component templates in a cluster. 
Component templates - are building blocks for constructing index templates that specify index mappings, - settings, and aliases. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get component template API. + Get component templates. Returns information about component templates in a cluster. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. CAT APIs are only intended for + human consumption using the command line or Kibana console. They are not intended + for use by applications. For application consumption, use the /_component_template + endpoints. ``_ @@ -316,12 +317,12 @@ def count( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Provides quick access to a document count for a data stream, an index, or an - entire cluster. NOTE: The document count only includes live documents, not deleted - documents which have not yet been removed by the merge process. IMPORTANT: cat + Get a document count. Provides quick access to a document count for a data stream, + an index, or an entire cluster.n/ The document count only includes live documents, + not deleted documents which have not yet been removed by the merge process. CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, - use the count API. + use /_count endpoints. ``_ @@ -585,7 +586,7 @@ def help( v: t.Optional[bool] = None, ) -> TextApiResponse: """ - Returns help for the Cat APIs. + Get CAT help. Returns help for the CAT APIs. 
``_ @@ -676,16 +677,16 @@ def indices( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns high-level information about indices in a cluster, including backing - indices for data streams. IMPORTANT: cat APIs are only intended for human consumption - using the command line or Kibana console. They are not intended for use by applications. - For application consumption, use the get index API. Use the cat indices API to - get the following information for each index in a cluster: shard count; document - count; deleted document count; primary store size; total store size of all shards, - including shard replicas. These metrics are retrieved directly from Lucene, which - Elasticsearch uses internally to power indexing and search. As a result, all - document counts include hidden nested documents. To get an accurate count of - Elasticsearch documents, use the cat count or count APIs. + Get index information. Returns high-level information about indices in a cluster, + including backing indices for data streams. Use this request to get the following + information for each index in a cluster: - shard count - document count - deleted + document count - primary store size - total store size of all shards, including + shard replicas These metrics are retrieved directly from Lucene, which Elasticsearch + uses internally to power indexing and search. As a result, all document counts + include hidden nested documents. To get an accurate count of Elasticsearch documents, + use the /_cat/count or _count endpoints. CAT APIs are only intended for human + consumption using the command line or Kibana console. They are not intended for + use by applications. For application consumption, use an index endpoint. ``_ @@ -894,10 +895,10 @@ def ml_data_frame_analytics( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about data frame analytics jobs. 
- IMPORTANT: cat APIs are only intended for human consumption using the Kibana - console or command line. They are not intended for use by applications. For application - consumption, use the get data frame analytics jobs statistics API. + Get data frame analytics jobs. Returns configuration and usage information about + data frame analytics jobs. CAT APIs are only intended for human consumption using + the Kibana console or command line. They are not intended for use by applications. + For application consumption, use the /_ml/data_frame/analytics endpoints. ``_ @@ -1016,12 +1017,12 @@ def ml_datafeeds( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about datafeeds. This API returns - a maximum of 10,000 datafeeds. If the Elasticsearch security features are enabled, - you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges - to use this API. IMPORTANT: cat APIs are only intended for human consumption + Get datafeeds. Returns configuration and usage information about datafeeds. This + API returns a maximum of 10,000 datafeeds. If the Elasticsearch security features + are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` + cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. - For application consumption, use the get datafeed statistics API. + For application consumption, use the /_ml/datafeeds endpoints. ``_ @@ -1146,13 +1147,13 @@ def ml_jobs( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information for anomaly detection jobs. This - API returns a maximum of 10,000 jobs. If the Elasticsearch security features - are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` - cluster privileges to use this API. 
IMPORTANT: cat APIs are only intended for + Get anomaly detection jobs. Returns configuration and usage information for anomaly + detection jobs. This API returns a maximum of 10,000 jobs. If the Elasticsearch + security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, + or `manage` cluster privileges to use this API. CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended - for use by applications. For application consumption, use the get anomaly detection - job statistics API. + for use by applications. For application consumption, use the /_ml/anomaly_detectors + endpoints. ``_ @@ -1280,10 +1281,10 @@ def ml_trained_models( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about inference trained models. IMPORTANT: - cat APIs are only intended for human consumption using the Kibana console or - command line. They are not intended for use by applications. For application - consumption, use the get trained models statistics API. + Get trained models. Returns configuration and usage information about inference + trained models. CAT APIs are only intended for human consumption using the Kibana + console or command line. They are not intended for use by applications. For application + consumption, use the /_ml/trained_models endpoints. ``_ @@ -2458,10 +2459,10 @@ def transforms( v: t.Optional[bool] = None, ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]: """ - Returns configuration and usage information about transforms. IMPORTANT: cat - APIs are only intended for human consumption using the Kibana console or command - line. They are not intended for use by applications. For application consumption, - use the get transform statistics API. + Get transforms. Returns configuration and usage information about transforms. 
+ CAT APIs are only intended for human consumption using the Kibana console or + command line. They are not intended for use by applications. For application + consumption, use the /_transform endpoints. ``_ diff --git a/elasticsearch/_sync/client/cluster.py b/elasticsearch/_sync/client/cluster.py index 961939fc99..5b86a61bcf 100644 --- a/elasticsearch/_sync/client/cluster.py +++ b/elasticsearch/_sync/client/cluster.py @@ -115,8 +115,9 @@ def delete_component_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes component templates. Component templates are building blocks for constructing - index templates that specify index mappings, settings, and aliases. + Delete component templates. Deletes component templates. Component templates + are building blocks for constructing index templates that specify index mappings, + settings, and aliases. ``_ @@ -215,7 +216,8 @@ def exists_component_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular component template exist + Check component templates. Returns information about whether a particular component + template exists. ``_ @@ -272,7 +274,7 @@ def get_component_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about component templates. + Get component templates. Retrieves information about component templates. ``_ @@ -536,7 +538,7 @@ def info( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns different information about the cluster. + Get cluster info. Returns basic information about the cluster. ``_ @@ -699,17 +701,18 @@ def put_component_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates a component template. Component templates are building blocks - for constructing index templates that specify index mappings, settings, and aliases. 
- An index template can be composed of multiple component templates. To use a component - template, specify it in an index template’s `composed_of` list. Component templates - are only applied to new data streams and indices as part of a matching index - template. Settings and mappings specified directly in the index template or the - create index request override any settings or mappings specified in a component - template. Component templates are only used during index creation. For data streams, - this includes data stream creation and the creation of a stream’s backing indices. - Changes to component templates do not affect existing indices, including a stream’s - backing indices. You can use C-style `/* *\\/` block comments in component templates. + Create or update a component template. Creates or updates a component template. + Component templates are building blocks for constructing index templates that + specify index mappings, settings, and aliases. An index template can be composed + of multiple component templates. To use a component template, specify it in an + index template’s `composed_of` list. Component templates are only applied to + new data streams and indices as part of a matching index template. Settings and + mappings specified directly in the index template or the create index request + override any settings or mappings specified in a component template. Component + templates are only used during index creation. For data streams, this includes + data stream creation and the creation of a stream’s backing indices. Changes + to component templates do not affect existing indices, including a stream’s backing + indices. You can use C-style `/* *\\/` block comments in component templates. You can include comments anywhere in the request body except before the opening curly bracket. 
diff --git a/elasticsearch/_sync/client/enrich.py b/elasticsearch/_sync/client/enrich.py index c6bf5f24bf..6a855c4024 100644 --- a/elasticsearch/_sync/client/enrich.py +++ b/elasticsearch/_sync/client/enrich.py @@ -36,7 +36,7 @@ def delete_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an existing enrich policy and its enrich index. + Delete an enrich policy. Deletes an existing enrich policy and its enrich index. ``_ @@ -121,7 +121,7 @@ def get_policy( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about an enrich policy. + Get an enrich policy. Returns information about an enrich policy. ``_ @@ -171,7 +171,7 @@ def put_policy( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an enrich policy. + Create an enrich policy. Creates an enrich policy. ``_ @@ -224,8 +224,8 @@ def stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns enrich coordinator statistics and information about enrich policies that - are currently executing. + Get enrich stats. Returns enrich coordinator statistics and information about + enrich policies that are currently executing. 
``_ """ diff --git a/elasticsearch/_sync/client/esql.py b/elasticsearch/_sync/client/esql.py index f3acd38795..844223895a 100644 --- a/elasticsearch/_sync/client/esql.py +++ b/elasticsearch/_sync/client/esql.py @@ -26,7 +26,15 @@ class EsqlClient(NamespacedClient): @_rewrite_parameters( - body_fields=("query", "columnar", "filter", "locale", "params"), + body_fields=( + "query", + "columnar", + "filter", + "locale", + "params", + "profile", + "tables", + ), ignore_deprecated_options={"params"}, ) def query( @@ -35,14 +43,21 @@ def query( query: t.Optional[str] = None, columnar: t.Optional[bool] = None, delimiter: t.Optional[str] = None, + drop_null_columns: t.Optional[bool] = None, error_trace: t.Optional[bool] = None, filter: t.Optional[t.Mapping[str, t.Any]] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, format: t.Optional[str] = None, human: t.Optional[bool] = None, locale: t.Optional[str] = None, - params: t.Optional[t.Sequence[t.Union[None, bool, float, int, str]]] = None, + params: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, pretty: t.Optional[bool] = None, + profile: t.Optional[bool] = None, + tables: t.Optional[ + t.Mapping[str, t.Mapping[str, t.Mapping[str, t.Any]]] + ] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -58,6 +73,10 @@ def query( row represents all the values of a certain column in the results. :param delimiter: The character to use between values within a CSV row. Only valid for the CSV format. + :param drop_null_columns: Should columns that are entirely `null` be removed + from the `columns` and `values` portion of the results? Defaults to `false`. + If `true` then the response will include an extra section under the name + `all_columns` which has the name of all columns. :param filter: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. 
:param format: A short version of the Accept header, e.g. json, yaml. @@ -65,6 +84,12 @@ def query( :param params: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. + :param profile: If provided and `true` the response will include an extra `profile` + object with information on how the query was executed. This information is + for human debugging and its format can change at any time but it can give + some insight into the performance of each part of the query. + :param tables: Tables to use with the LOOKUP operation. The top level key is + the table name and the next level key is the column name. """ if query is None and body is None: raise ValueError("Empty value passed for parameter 'query'") @@ -74,6 +99,8 @@ def query( __body: t.Dict[str, t.Any] = body if body is not None else {} if delimiter is not None: __query["delimiter"] = delimiter + if drop_null_columns is not None: + __query["drop_null_columns"] = drop_null_columns if error_trace is not None: __query["error_trace"] = error_trace if filter_path is not None: @@ -95,6 +122,10 @@ def query( __body["locale"] = locale if params is not None: __body["params"] = params + if profile is not None: + __body["profile"] = profile + if tables is not None: + __body["tables"] = tables __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "POST", diff --git a/elasticsearch/_sync/client/indices.py b/elasticsearch/_sync/client/indices.py index d2e5b08d33..3ad687a0fb 100644 --- a/elasticsearch/_sync/client/indices.py +++ b/elasticsearch/_sync/client/indices.py @@ -51,7 +51,8 @@ def add_block( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a block to an index. + Add an index block. 
Limits the operations allowed on an index by blocking specific + operation types. ``_ @@ -500,7 +501,7 @@ def create( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index. + Create an index. Creates a new index. ``_ @@ -571,8 +572,8 @@ def create_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a data stream. You must have a matching index template with data stream - enabled. + Create a data stream. Creates a data stream. You must have a matching index template + with data stream enabled. ``_ @@ -624,7 +625,7 @@ def data_streams_stats( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves statistics for one or more data streams. + Get data stream stats. Retrieves statistics for one or more data streams. ``_ @@ -687,7 +688,7 @@ def delete( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more indices. + Delete indices. Deletes one or more indices. ``_ @@ -759,7 +760,7 @@ def delete_alias( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes a data stream or index from an alias. + Delete an alias. Removes a data stream or index from an alias. ``_ @@ -825,8 +826,8 @@ def delete_data_lifecycle( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Removes the data lifecycle from a data stream rendering it not managed by the - data stream lifecycle + Delete data stream lifecycles. Removes the data stream lifecycle from a data + stream, rendering it not managed by the data stream lifecycle. ``_ @@ -885,7 +886,7 @@ def delete_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes one or more data streams and their backing indices. + Delete data streams. Deletes one or more data streams and their backing indices. 
``_ @@ -934,9 +935,10 @@ def delete_index_template( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - The provided may contain multiple template names separated by - a comma. If multiple template names are specified then there is no wildcard support - and the provided names should match completely with existing templates. + Delete an index template. The provided may contain multiple + template names separated by a comma. If multiple template names are specified + then there is no wildcard support and the provided names should match completely + with existing templates. ``_ @@ -1195,7 +1197,8 @@ def exists( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if a data stream, index, or alias exists. + Check indices. Checks if one or more indices, index aliases, or data streams + exist. ``_ @@ -1273,7 +1276,7 @@ def exists_alias( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Checks if an alias exists. + Check aliases. Checks if one or more data stream or index aliases exist. ``_ @@ -1396,7 +1399,8 @@ def exists_template( pretty: t.Optional[bool] = None, ) -> HeadApiResponse: """ - Returns information about whether a particular index template exists. + Check existence of index templates. Returns information about whether a particular + index template exists. ``_ @@ -1450,8 +1454,10 @@ def explain_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about the index's current data stream lifecycle, such as - any potential encountered error, time since creation etc. + Get the status for a data stream lifecycle. Retrieves information about an index + or data stream’s current data stream lifecycle status, such as time since index + creation, time since rollover, the lifecycle configuration managing the index, + or any errors encountered during lifecycle execution. 
``_ @@ -1784,8 +1790,8 @@ def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more indices. For data streams, the API returns - information about the stream’s backing indices. + Get index information. Returns information about one or more indices. For data + streams, the API returns information about the stream’s backing indices. ``_ @@ -1874,7 +1880,7 @@ def get_alias( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more aliases. + Get aliases. Retrieves information for one or more data stream or index aliases. ``_ @@ -1955,7 +1961,8 @@ def get_data_lifecycle( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves the data stream lifecycle configuration of one or more data streams. + Get data stream lifecycles. Retrieves the data stream lifecycle configuration + of one or more data streams. ``_ @@ -2014,7 +2021,7 @@ def get_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more data streams. + Get data streams. Retrieves information about one or more data streams. ``_ @@ -2080,8 +2087,8 @@ def get_field_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more fields. For data streams, the API - retrieves field mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more fields. + For data streams, the API retrieves field mappings for the stream’s backing indices. ``_ @@ -2160,7 +2167,7 @@ def get_index_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about one or more index templates. + Get index templates. Returns information about one or more index templates. 
``_ @@ -2235,8 +2242,8 @@ def get_mapping( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves mapping definitions for one or more indices. For data streams, the - API retrieves mappings for the stream’s backing indices. + Get mapping definitions. Retrieves mapping definitions for one or more indices. + For data streams, the API retrieves mappings for the stream’s backing indices. ``_ @@ -2322,8 +2329,8 @@ def get_settings( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns setting information for one or more indices. For data streams, returns - setting information for the stream’s backing indices. + Get index settings. Returns setting information for one or more indices. For + data streams, returns setting information for the stream’s backing indices. ``_ @@ -2412,7 +2419,7 @@ def get_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information about one or more index templates. + Get index templates. Retrieves information about one or more index templates. ``_ @@ -2469,14 +2476,14 @@ def migrate_to_data_stream( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Converts an index alias to a data stream. You must have a matching index template - that is data stream enabled. The alias must meet the following criteria: The - alias must have a write index; All indices for the alias must have a `@timestamp` - field mapping of a `date` or `date_nanos` field type; The alias must not have - any filters; The alias must not use custom routing. If successful, the request - removes the alias and creates a data stream with the same name. The indices for - the alias become hidden backing indices for the stream. The write index for the - alias becomes the write index for the stream. + Convert an index alias to a data stream. Converts an index alias to a data stream. + You must have a matching index template that is data stream enabled. 
The alias + must meet the following criteria: The alias must have a write index; All indices + for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` + field type; The alias must not have any filters; The alias must not use custom + routing. If successful, the request removes the alias and creates a data stream + with the same name. The indices for the alias become hidden backing indices for + the stream. The write index for the alias becomes the write index for the stream. ``_ @@ -2519,7 +2526,8 @@ def modify_data_stream( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Performs one or more data stream modification actions in a single atomic operation. + Update data streams. Performs one or more data stream modification actions in + a single atomic operation. ``_ @@ -2715,7 +2723,7 @@ def put_alias( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -2821,7 +2829,8 @@ def put_data_lifecycle( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Update the data lifecycle of the specified data streams. + Update data stream lifecycles. Update the data stream lifecycle of the specified + data streams. ``_ @@ -2924,7 +2933,7 @@ def put_index_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates an index template. Index templates define settings, mappings, + Create or update an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. ``_ @@ -3086,9 +3095,9 @@ def put_mapping( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds new fields to an existing data stream or index. You can also use this API - to change the search settings of existing fields. 
For data streams, these changes - are applied to all backing indices by default. + Update field mappings. Adds new fields to an existing data stream or index. You + can also use this API to change the search settings of existing fields. For data + streams, these changes are applied to all backing indices by default. ``_ @@ -3219,8 +3228,8 @@ def put_settings( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Changes a dynamic index setting in real time. For data streams, index setting - changes are applied to all backing indices by default. + Update index settings. Changes dynamic index settings in real time. For data + streams, index setting changes are applied to all backing indices by default. ``_ @@ -3326,7 +3335,7 @@ def put_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates or updates an index template. Index templates define settings, mappings, + Create or update an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. ``_ @@ -3471,9 +3480,9 @@ def refresh( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - A refresh makes recent operations performed on one or more indices available - for search. For data streams, the API runs the refresh operation on the stream’s - backing indices. + Refresh an index. A refresh makes recent operations performed on one or more + indices available for search. For data streams, the API runs the refresh operation + on the stream’s backing indices. ``_ @@ -3745,7 +3754,7 @@ def rollover( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a new index for a data stream or index alias. + Roll over to a new index. Creates a new index for a data stream or index alias. 
``_ @@ -4081,7 +4090,8 @@ def simulate_index_template( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - + Simulate an index. Returns the index configuration that would be applied to the + specified index from an existing index template. ``_ @@ -4160,7 +4170,8 @@ def simulate_template( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns the index configuration that would be applied by a particular index template. + Simulate an index template. Returns the index configuration that would be applied + by a particular index template. ``_ @@ -4567,7 +4578,7 @@ def update_aliases( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Adds a data stream or index to an alias. + Create or update an alias. Adds a data stream or index to an alias. ``_ @@ -4642,7 +4653,7 @@ def validate_query( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Validates a potentially expensive query without executing it. + Validate a query. Validates a query without running it. ``_ diff --git a/elasticsearch/_sync/client/license.py b/elasticsearch/_sync/client/license.py index dead68945d..43135d5a76 100644 --- a/elasticsearch/_sync/client/license.py +++ b/elasticsearch/_sync/client/license.py @@ -72,9 +72,9 @@ def get( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - This API returns information about the type of license, when it was issued, and - when it expires, for example. For more information about the different types - of licenses, see https://www.elastic.co/subscriptions. + Get license information. Returns information about your Elastic license, including + its type, its status, when it was issued, and when it expires. For more information + about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). 
``_ diff --git a/elasticsearch/_sync/client/ml.py b/elasticsearch/_sync/client/ml.py index 1338e13816..8931373d8c 100644 --- a/elasticsearch/_sync/client/ml.py +++ b/elasticsearch/_sync/client/ml.py @@ -88,7 +88,7 @@ def close_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Close anomaly detection jobs A job can be opened and closed multiple times throughout + Close anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating @@ -579,7 +579,7 @@ def delete_job( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes an anomaly detection job. All job configuration, model state and results + Delete an anomaly detection job. All job configuration, model state and results are deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. If you delete a job that has a datafeed, the request first tries to delete the datafeed. This behavior is equivalent to calling the @@ -2739,12 +2739,12 @@ def open_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Opens one or more anomaly detection jobs. An anomaly detection job must be opened - in order for it to be ready to receive and analyze data. It can be opened and - closed multiple times throughout its lifecycle. When you open a new job, it starts - with an empty model. When you open an existing job, the most recent model state - is automatically loaded. The job is ready to resume its analysis from where it - left off, once new data is received. + Open anomaly detection jobs. An anomaly detection job must be opened in order + for it to be ready to receive and analyze data. 
It can be opened and closed multiple + times throughout its lifecycle. When you open a new job, it starts with an empty + model. When you open an existing job, the most recent model state is automatically + loaded. The job is ready to resume its analysis from where it left off, once + new data is received. ``_ @@ -3612,8 +3612,8 @@ def put_job( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Instantiates an anomaly detection job. If you include a `datafeed_config`, you - must have read index privileges on the source index. + Create an anomaly detection job. If you include a `datafeed_config`, you must + have read index privileges on the source index. ``_ diff --git a/elasticsearch/_sync/client/query_rules.py b/elasticsearch/_sync/client/query_rules.py index d0ba355c85..384274985b 100644 --- a/elasticsearch/_sync/client/query_rules.py +++ b/elasticsearch/_sync/client/query_rules.py @@ -250,7 +250,7 @@ def list_rulesets( ) @_rewrite_parameters( - body_fields=("actions", "criteria", "type"), + body_fields=("actions", "criteria", "type", "priority"), ) def put_rule( self, @@ -258,12 +258,15 @@ def put_rule( ruleset_id: str, rule_id: str, actions: t.Optional[t.Mapping[str, t.Any]] = None, - criteria: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + criteria: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, type: t.Optional[t.Union["t.Literal['pinned']", str]] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, pretty: t.Optional[bool] = None, + priority: t.Optional[int] = None, body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ @@ -278,6 +281,7 @@ def put_rule( :param actions: :param criteria: :param type: + :param priority: """ if ruleset_id in SKIP_IN_PATH: raise ValueError("Empty value passed for parameter 'ruleset_id'") @@ -311,6 +315,8 @@ def put_rule( 
__body["criteria"] = criteria if type is not None: __body["type"] = type + if priority is not None: + __body["priority"] = priority __headers = {"accept": "application/json", "content-type": "application/json"} return self.perform_request( # type: ignore[return-value] "PUT", @@ -329,7 +335,9 @@ def put_ruleset( self, *, ruleset_id: str, - rules: t.Optional[t.Sequence[t.Mapping[str, t.Any]]] = None, + rules: t.Optional[ + t.Union[t.Mapping[str, t.Any], t.Sequence[t.Mapping[str, t.Any]]] + ] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, human: t.Optional[bool] = None, diff --git a/elasticsearch/_sync/client/security.py b/elasticsearch/_sync/client/security.py index b4ce94f75e..2c9be74cd1 100644 --- a/elasticsearch/_sync/client/security.py +++ b/elasticsearch/_sync/client/security.py @@ -97,12 +97,12 @@ def authenticate( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Enables you to submit a request with a basic auth header to authenticate a user - and retrieve information about the authenticated user. A successful call returns - a JSON structure that shows user information such as their username, the roles - that are assigned to the user, any assigned metadata, and information about the - realms that authenticated and authorized the user. If the user cannot be authenticated, - this API returns a 401 status code. + Authenticate a user. Authenticates a user and returns information about the authenticated + user. Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). + A successful call returns a JSON structure that shows user information such as + their username, the roles that are assigned to the user, any assigned metadata, + and information about the realms that authenticated and authorized the user. + If the user cannot be authenticated, this API returns a 401 status code. 
``_ """ @@ -127,6 +127,122 @@ def authenticate( path_parts=__path_parts, ) + @_rewrite_parameters( + body_fields=("names",), + ) + def bulk_delete_role( + self, + *, + names: t.Optional[t.Sequence[str]] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + refresh: t.Optional[ + t.Union["t.Literal['false', 'true', 'wait_for']", bool, str] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + The role management APIs are generally the preferred way to manage roles, rather + than using file-based role management. The bulk delete roles API cannot delete + roles that are defined in roles files. + + ``_ + + :param names: An array of role names to delete + :param refresh: If `true` (the default) then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh to + make this operation visible to search, if `false` then do nothing with refreshes. 
+ """ + if names is None and body is None: + raise ValueError("Empty value passed for parameter 'names'") + __path_parts: t.Dict[str, str] = {} + __path = "/_security/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if refresh is not None: + __query["refresh"] = refresh + if not __body: + if names is not None: + __body["names"] = names + __headers = {"accept": "application/json", "content-type": "application/json"} + return self.perform_request( # type: ignore[return-value] + "DELETE", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.bulk_delete_role", + path_parts=__path_parts, + ) + + @_rewrite_parameters( + body_fields=("roles",), + ) + def bulk_put_role( + self, + *, + roles: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + refresh: t.Optional[ + t.Union["t.Literal['false', 'true', 'wait_for']", bool, str] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + The role management APIs are generally the preferred way to manage roles, rather + than using file-based role management. The bulk create or update roles API cannot + update roles that are defined in roles files. + + ``_ + + :param roles: A dictionary of role name to RoleDescriptor objects to add or update + :param refresh: If `true` (the default) then refresh the affected shards to make + this operation visible to search, if `wait_for` then wait for a refresh to + make this operation visible to search, if `false` then do nothing with refreshes. 
+ """ + if roles is None and body is None: + raise ValueError("Empty value passed for parameter 'roles'") + __path_parts: t.Dict[str, str] = {} + __path = "/_security/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if refresh is not None: + __query["refresh"] = refresh + if not __body: + if roles is not None: + __body["roles"] = roles + __headers = {"accept": "application/json", "content-type": "application/json"} + return self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.bulk_put_role", + path_parts=__path_parts, + ) + @_rewrite_parameters( body_fields=("password", "password_hash"), ) @@ -436,11 +552,11 @@ def create_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates an API key for access without requiring basic authentication. A successful - request returns a JSON structure that contains the API key, its unique id, and - its name. If applicable, it also returns expiration information for the API key - in milliseconds. NOTE: By default, API keys never expire. You can specify expiration - information when you create the API keys. + Create an API key. Creates an API key for access without requiring basic authentication. + A successful request returns a JSON structure that contains the API key, its + unique id, and its name. If applicable, it also returns expiration information + for the API key in milliseconds. NOTE: By default, API keys never expire. You + can specify expiration information when you create the API keys. 
``_ @@ -1109,10 +1225,11 @@ def get_api_key( with_profile_uid: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for one or more API keys. NOTE: If you have only the `manage_own_api_key` - privilege, this API returns only the API keys that you own. If you have `read_security`, - `manage_api_key` or greater privileges (including `manage_security`), this API - returns all API keys regardless of ownership. + Get API key information. Retrieves information for one or more API keys. NOTE: + If you have only the `manage_own_api_key` privilege, this API returns only the + API keys that you own. If you have `read_security`, `manage_api_key` or greater + privileges (including `manage_security`), this API returns all API keys regardless + of ownership. ``_ @@ -1789,7 +1906,7 @@ def has_privileges( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 
'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] @@ -1802,7 +1919,8 @@ def has_privileges( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Determines whether the specified user has a specified list of privileges. + Check user privileges. Determines whether the specified user has a specified + list of privileges. ``_ @@ -1921,13 +2039,13 @@ def invalidate_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Invalidates one or more API keys. The `manage_api_key` privilege allows deleting - any API keys. The `manage_own_api_key` only allows deleting API keys that are - owned by the user. 
In addition, with the `manage_own_api_key` privilege, an invalidation - request must be issued in one of the three formats: - Set the parameter `owner=true`. - - Or, set both `username` and `realm_name` to match the user’s identity. - Or, - if the request is issued by an API key, i.e. an API key invalidates itself, specify - its ID in the `ids` field. + Invalidate API keys. Invalidates one or more API keys. The `manage_api_key` privilege + allows deleting any API keys. The `manage_own_api_key` only allows deleting API + keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, + an invalidation request must be issued in one of the three formats: - Set the + parameter `owner=true`. - Or, set both `username` and `realm_name` to match the + user’s identity. - Or, if the request is issued by an API key, i.e. an API key + invalidates itself, specify its ID in the `ids` field. ``_ @@ -2102,6 +2220,7 @@ def put_privileges( body_fields=( "applications", "cluster", + "description", "global_", "indices", "metadata", @@ -2118,11 +2237,12 @@ def put_role( cluster: t.Optional[ t.Sequence[ t.Union[ - "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 
'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_connector_secrets', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", + "t.Literal['all', 'cancel_task', 'create_snapshot', 'cross_cluster_replication', 'cross_cluster_search', 'delegate_pki', 'grant_api_key', 'manage', 'manage_api_key', 'manage_autoscaling', 'manage_behavioral_analytics', 'manage_ccr', 'manage_data_frame_transforms', 'manage_data_stream_global_retention', 'manage_enrich', 'manage_ilm', 'manage_index_templates', 'manage_inference', 'manage_ingest_pipelines', 'manage_logstash_pipelines', 'manage_ml', 'manage_oidc', 'manage_own_api_key', 'manage_pipeline', 'manage_rollup', 'manage_saml', 'manage_search_application', 'manage_search_query_rules', 'manage_search_synonyms', 'manage_security', 'manage_service_account', 'manage_slm', 'manage_token', 'manage_transform', 'manage_user_profile', 'manage_watcher', 'monitor', 'monitor_data_frame_transforms', 'monitor_data_stream_global_retention', 'monitor_enrich', 'monitor_inference', 'monitor_ml', 'monitor_rollup', 'monitor_snapshot', 'monitor_text_structure', 'monitor_transform', 'monitor_watcher', 'none', 'post_behavioral_analytics_event', 'read_ccr', 'read_fleet_secrets', 'read_ilm', 'read_pipeline', 'read_security', 'read_slm', 'transport_client', 'write_connector_secrets', 'write_fleet_secrets']", str, ] ] ] = None, + description: t.Optional[str] = None, error_trace: t.Optional[bool] = None, filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, global_: t.Optional[t.Mapping[str, t.Any]] = None, @@ -2148,6 +2268,7 @@ def put_role( :param applications: A list of application privilege entries. :param cluster: A list of cluster privileges. 
These privileges define the cluster-level actions for users with this role. + :param description: Optional description of the role descriptor :param global_: An object defining global privileges. A global privilege is a form of cluster privilege that is request-aware. Support for global privileges is currently limited to the management of application privileges. @@ -2189,6 +2310,8 @@ def put_role( __body["applications"] = applications if cluster is not None: __body["cluster"] = cluster + if description is not None: + __body["description"] = description if global_ is not None: __body["global"] = global_ if indices is not None: @@ -2425,8 +2548,8 @@ def query_api_keys( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves information for API keys in a paginated manner. You can optionally - filter the results with a query. + Query API keys. Retrieves a paginated list of API keys and their information. + You can optionally filter the results with a query. ``_ @@ -2526,6 +2649,181 @@ def query_api_keys( path_parts=__path_parts, ) + @_rewrite_parameters( + body_fields=("from_", "query", "search_after", "size", "sort"), + parameter_aliases={"from": "from_"}, + ) + def query_role( + self, + *, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + from_: t.Optional[int] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[t.Mapping[str, t.Any]] = None, + search_after: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, + size: t.Optional[int] = None, + sort: t.Optional[ + t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], + t.Union[str, t.Mapping[str, t.Any]], + ] + ] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Retrieves roles in a paginated manner. You can optionally filter the results + with a query. 
+ + ``_ + + :param from_: Starting document offset. By default, you cannot page through more + than 10,000 hits using the from and size parameters. To page through more + hits, use the `search_after` parameter. + :param query: A query to filter which roles to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. + You can query the following information associated with roles: `name`, `description`, + `metadata`, `applications.application`, `applications.privileges`, `applications.resources`. + :param search_after: Search after definition + :param size: The number of hits to return. By default, you cannot page through + more than 10,000 hits using the `from` and `size` parameters. To page through + more hits, use the `search_after` parameter. + :param sort: All public fields of a role are eligible for sorting. In addition, + sort can also be applied to the `_doc` field to sort by index order. 
+ """ + __path_parts: t.Dict[str, str] = {} + __path = "/_security/_query/role" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if not __body: + if from_ is not None: + __body["from"] = from_ + if query is not None: + __body["query"] = query + if search_after is not None: + __body["search_after"] = search_after + if size is not None: + __body["size"] = size + if sort is not None: + __body["sort"] = sort + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.query_role", + path_parts=__path_parts, + ) + + @_rewrite_parameters( + body_fields=("from_", "query", "search_after", "size", "sort"), + parameter_aliases={"from": "from_"}, + ) + def query_user( + self, + *, + error_trace: t.Optional[bool] = None, + filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None, + from_: t.Optional[int] = None, + human: t.Optional[bool] = None, + pretty: t.Optional[bool] = None, + query: t.Optional[t.Mapping[str, t.Any]] = None, + search_after: t.Optional[ + t.Sequence[t.Union[None, bool, float, int, str, t.Any]] + ] = None, + size: t.Optional[int] = None, + sort: t.Optional[ + t.Union[ + t.Sequence[t.Union[str, t.Mapping[str, t.Any]]], + t.Union[str, t.Mapping[str, t.Any]], + ] + ] = None, + with_profile_uid: t.Optional[bool] = None, + body: t.Optional[t.Dict[str, t.Any]] = None, + ) -> ObjectApiResponse[t.Any]: + """ + Retrieves information for Users in a paginated manner. 
You can optionally filter + the results with a query. + + ``_ + + :param from_: Starting document offset. By default, you cannot page through more + than 10,000 hits using the from and size parameters. To page through more + hits, use the `search_after` parameter. + :param query: A query to filter which users to return. If the query parameter + is missing, it is equivalent to a `match_all` query. The query supports a + subset of query types, including `match_all`, `bool`, `term`, `terms`, `match`, + `ids`, `prefix`, `wildcard`, `exists`, `range`, and `simple_query_string`. + You can query the following information associated with user: `username`, + `roles`, `enabled` + :param search_after: Search after definition + :param size: The number of hits to return. By default, you cannot page through + more than 10,000 hits using the `from` and `size` parameters. To page through + more hits, use the `search_after` parameter. + :param sort: Fields eligible for sorting are: username, roles, enabled In addition, + sort can also be applied to the `_doc` field to sort by index order. + :param with_profile_uid: If true will return the User Profile ID for the users + in the query result, if any. 
+ """ + __path_parts: t.Dict[str, str] = {} + __path = "/_security/_query/user" + __query: t.Dict[str, t.Any] = {} + __body: t.Dict[str, t.Any] = body if body is not None else {} + if error_trace is not None: + __query["error_trace"] = error_trace + if filter_path is not None: + __query["filter_path"] = filter_path + if human is not None: + __query["human"] = human + if pretty is not None: + __query["pretty"] = pretty + if with_profile_uid is not None: + __query["with_profile_uid"] = with_profile_uid + if not __body: + if from_ is not None: + __body["from"] = from_ + if query is not None: + __body["query"] = query + if search_after is not None: + __body["search_after"] = search_after + if size is not None: + __body["size"] = size + if sort is not None: + __body["sort"] = sort + if not __body: + __body = None # type: ignore[assignment] + __headers = {"accept": "application/json"} + if __body is not None: + __headers["content-type"] = "application/json" + return self.perform_request( # type: ignore[return-value] + "POST", + __path, + params=__query, + headers=__headers, + body=__body, + endpoint_id="security.query_user", + path_parts=__path_parts, + ) + @_rewrite_parameters( body_fields=("content", "ids", "realm"), ) @@ -2965,22 +3263,22 @@ def update_api_key( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates attributes of an existing API key. Users can only update API keys that - they created or that were granted to them. Use this API to update API keys created - by the create API Key or grant API Key APIs. If you need to apply the same update - to many API keys, you can use bulk update API Keys to reduce overhead. It’s not - possible to update expired API keys, or API keys that have been invalidated by - invalidate API Key. This API supports updates to an API key’s access scope and - metadata. 
The access scope of an API key is derived from the `role_descriptors` - you specify in the request, and a snapshot of the owner user’s permissions at - the time of the request. The snapshot of the owner’s permissions is updated automatically - on every call. If you don’t specify `role_descriptors` in the request, a call - to this API might still change the API key’s access scope. This change can occur - if the owner user’s permissions have changed since the API key was created or - last modified. To update another user’s API key, use the `run_as` feature to - submit a request on behalf of another user. IMPORTANT: It’s not possible to use - an API key as the authentication credential for this API. To update an API key, - the owner user’s credentials are required. + Update an API key. Updates attributes of an existing API key. Users can only + update API keys that they created or that were granted to them. Use this API + to update API keys created by the create API Key or grant API Key APIs. If you + need to apply the same update to many API keys, you can use bulk update API Keys + to reduce overhead. It’s not possible to update expired API keys, or API keys + that have been invalidated by invalidate API Key. This API supports updates to + an API key’s access scope and metadata. The access scope of an API key is derived + from the `role_descriptors` you specify in the request, and a snapshot of the + owner user’s permissions at the time of the request. The snapshot of the owner’s + permissions is updated automatically on every call. If you don’t specify `role_descriptors` + in the request, a call to this API might still change the API key’s access scope. + This change can occur if the owner user’s permissions have changed since the + API key was created or last modified. To update another user’s API key, use the + `run_as` feature to submit a request on behalf of another user. 
IMPORTANT: It’s + not possible to use an API key as the authentication credential for this API. + To update an API key, the owner user’s credentials are required. ``_ diff --git a/elasticsearch/_sync/client/tasks.py b/elasticsearch/_sync/client/tasks.py index 4db15d817f..de52506fb0 100644 --- a/elasticsearch/_sync/client/tasks.py +++ b/elasticsearch/_sync/client/tasks.py @@ -99,7 +99,8 @@ def get( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Returns information about a task. + Get task information. Returns information about the tasks currently executing + in the cluster. ``_ diff --git a/elasticsearch/_sync/client/transform.py b/elasticsearch/_sync/client/transform.py index 6eac80a301..913aa89888 100644 --- a/elasticsearch/_sync/client/transform.py +++ b/elasticsearch/_sync/client/transform.py @@ -39,7 +39,7 @@ def delete_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Deletes a transform. + Delete a transform. Deletes a transform. ``_ @@ -99,7 +99,7 @@ def get_transform( size: t.Optional[int] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves configuration information for transforms. + Get transforms. Retrieves configuration information for transforms. ``_ @@ -168,7 +168,7 @@ def get_transform_stats( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Retrieves usage information for transforms. + Get transform stats. Retrieves usage information for transforms. ``_ @@ -249,10 +249,12 @@ def preview_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Previews a transform. It returns a maximum of 100 results. The calculations are - based on all the current data in the source index. It also generates a list of - mappings and settings for the destination index. 
These values are determined - based on the field types of the source index and the transform aggregations. + Preview a transform. Generates a preview of the results that you will get when + you create a transform with the same configuration. It returns a maximum of 100 + results. The calculations are based on all the current data in the source index. + It also generates a list of mappings and settings for the destination index. + These values are determined based on the field types of the source index and + the transform aggregations. ``_ @@ -369,26 +371,27 @@ def put_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Creates a transform. A transform copies data from source indices, transforms - it, and persists it into an entity-centric destination index. You can also think - of the destination index as a two-dimensional tabular data structure (known as - a data frame). The ID for each document in the data frame is generated from a - hash of the entity, so there is a unique row per entity. You must choose either - the latest or pivot method for your transform; you cannot use both in a single - transform. If you choose to use the pivot method for your transform, the entities - are defined by the set of `group_by` fields in the pivot object. If you choose - to use the latest method, the entities are defined by the `unique_key` field - values in the latest object. You must have `create_index`, `index`, and `read` - privileges on the destination index and `read` and `view_index_metadata` privileges - on the source indices. When Elasticsearch security features are enabled, the - transform remembers which roles the user that created it had at the time of creation - and uses those same roles. If those roles do not have the required privileges - on the source and destination indices, the transform fails when it attempts unauthorized - operations. NOTE: You must use Kibana or this API to create a transform. 
Do not - add a transform directly into any `.transform-internal*` indices using the Elasticsearch - index API. If Elasticsearch security features are enabled, do not give users - any privileges on `.transform-internal*` indices. If you used transforms prior - to 7.5, also do not give users any privileges on `.data-frame-internal*` indices. + Create a transform. Creates a transform. A transform copies data from source + indices, transforms it, and persists it into an entity-centric destination index. + You can also think of the destination index as a two-dimensional tabular data + structure (known as a data frame). The ID for each document in the data frame + is generated from a hash of the entity, so there is a unique row per entity. + You must choose either the latest or pivot method for your transform; you cannot + use both in a single transform. If you choose to use the pivot method for your + transform, the entities are defined by the set of `group_by` fields in the pivot + object. If you choose to use the latest method, the entities are defined by the + `unique_key` field values in the latest object. You must have `create_index`, + `index`, and `read` privileges on the destination index and `read` and `view_index_metadata` + privileges on the source indices. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. NOTE: You must use Kibana or this API to create + a transform. Do not add a transform directly into any `.transform-internal*` + indices using the Elasticsearch index API. If Elasticsearch security features + are enabled, do not give users any privileges on `.transform-internal*` indices. 
+ If you used transforms prior to 7.5, also do not give users any privileges on + `.data-frame-internal*` indices. ``_ @@ -488,9 +491,9 @@ def reset_transform( pretty: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Resets a transform. Before you can reset it, you must stop it; alternatively, - use the `force` query parameter. If the destination index was created by the - transform, it is deleted. + Reset a transform. Resets a transform. Before you can reset it, you must stop + it; alternatively, use the `force` query parameter. If the destination index + was created by the transform, it is deleted. ``_ @@ -538,10 +541,11 @@ def schedule_now_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Schedules now a transform. If you _schedule_now a transform, it will process - the new data instantly, without waiting for the configured frequency interval. - After _schedule_now API is called, the transform will be processed again at now - + frequency unless _schedule_now API is called again in the meantime. + Schedule a transform to start now. Instantly runs a transform to process data. + If you _schedule_now a transform, it will process the new data instantly, without + waiting for the configured frequency interval. After _schedule_now API is called, + the transform will be processed again at now + frequency unless _schedule_now + API is called again in the meantime. ``_ @@ -588,23 +592,24 @@ def start_transform( timeout: t.Optional[t.Union["t.Literal[-1]", "t.Literal[0]", str]] = None, ) -> ObjectApiResponse[t.Any]: """ - Starts a transform. When you start a transform, it creates the destination index - if it does not already exist. The `number_of_shards` is set to `1` and the `auto_expand_replicas` - is set to `0-1`. If it is a pivot transform, it deduces the mapping definitions - for the destination index from the source indices and the transform aggregations. 
- If fields in the destination index are derived from scripts (as in the case of - `scripted_metric` or `bucket_script` aggregations), the transform uses dynamic - mappings unless an index template exists. If it is a latest transform, it does - not deduce mapping definitions; it uses dynamic mappings. To use explicit mappings, - create the destination index before you start the transform. Alternatively, you - can create an index template, though it does not affect the deduced mappings - in a pivot transform. When the transform starts, a series of validations occur - to ensure its success. If you deferred validation when you created the transform, - they occur when you start the transform—​with the exception of privilege checks. - When Elasticsearch security features are enabled, the transform remembers which - roles the user that created it had at the time of creation and uses those same - roles. If those roles do not have the required privileges on the source and destination - indices, the transform fails when it attempts unauthorized operations. + Start a transform. Starts a transform. When you start a transform, it creates + the destination index if it does not already exist. The `number_of_shards` is + set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, + it deduces the mapping definitions for the destination index from the source + indices and the transform aggregations. If fields in the destination index are + derived from scripts (as in the case of `scripted_metric` or `bucket_script` + aggregations), the transform uses dynamic mappings unless an index template exists. + If it is a latest transform, it does not deduce mapping definitions; it uses + dynamic mappings. To use explicit mappings, create the destination index before + you start the transform. Alternatively, you can create an index template, though + it does not affect the deduced mappings in a pivot transform. 
When the transform + starts, a series of validations occur to ensure its success. If you deferred + validation when you created the transform, they occur when you start the transform—​with + the exception of privilege checks. When Elasticsearch security features are enabled, + the transform remembers which roles the user that created it had at the time + of creation and uses those same roles. If those roles do not have the required + privileges on the source and destination indices, the transform fails when it + attempts unauthorized operations. ``_ @@ -658,7 +663,7 @@ def stop_transform( wait_for_completion: t.Optional[bool] = None, ) -> ObjectApiResponse[t.Any]: """ - Stops one or more transforms. + Stop transforms. Stops one or more transforms. ``_ @@ -751,13 +756,14 @@ def update_transform( body: t.Optional[t.Dict[str, t.Any]] = None, ) -> ObjectApiResponse[t.Any]: """ - Updates certain properties of a transform. All updated properties except `description` - do not take effect until after the transform starts the next checkpoint, thus - there is data consistency in each checkpoint. To use this API, you must have - `read` and `view_index_metadata` privileges for the source indices. You must - also have `index` and `read` privileges for the destination index. When Elasticsearch - security features are enabled, the transform remembers which roles the user who - updated it had at the time of update and runs with those privileges. + Update a transform. Updates certain properties of a transform. All updated properties + except `description` do not take effect until after the transform starts the + next checkpoint, thus there is data consistency in each checkpoint. To use this + API, you must have `read` and `view_index_metadata` privileges for the source + indices. You must also have `index` and `read` privileges for the destination + index. 
When Elasticsearch security features are enabled, the transform remembers + which roles the user who updated it had at the time of update and runs with those + privileges. ``_