From 419e05bfd30532b7b2957ac201fd4573da658cdd Mon Sep 17 00:00:00 2001
From: Sylvain Wallez
Date: Wed, 17 Feb 2021 19:30:55 +0100
Subject: [PATCH] [es] Generate code based on ES v7.11.1

---
 Makefile.toml | 2 +-
 api_generator/rest_specs/cat.plugins.json | 5 -
 api_generator/rest_specs/eql.get_status.json | 31 --
 api_generator/rest_specs/indices.close.json | 2 +-
 .../rest_specs/last_downloaded_version | 2 +-
 .../rest_specs/logstash.delete_pipeline.json | 28 --
 .../rest_specs/logstash.get_pipeline.json | 28 --
 .../rest_specs/logstash.put_pipeline.json | 34 --
 ...cture.json => ml.find_file_structure.json} | 6 +-
 api_generator/rest_specs/search.json | 4 -
 .../searchable_snapshots.mount.json | 5 -
 .../searchable_snapshots.stats.json | 12 -
 .../rest_specs/snapshot.get_features.json | 29 --
 elasticsearch/src/.generated.toml | 2 -
 elasticsearch/src/cat.rs | 9 -
 elasticsearch/src/eql.rs | 122 -----
 elasticsearch/src/indices.rs | 2 +-
 elasticsearch/src/lib.rs | 2 -
 elasticsearch/src/logstash.rs | 464 ------------------
 elasticsearch/src/ml.rs | 287 +++++++++++
 elasticsearch/src/root/mod.rs | 10 -
 elasticsearch/src/searchable_snapshots.rs | 19 -
 elasticsearch/src/snapshot.rs | 125 -----
 elasticsearch/src/text_structure.rs | 359 --------------
 24 files changed, 294 insertions(+), 1295 deletions(-)
 delete mode 100644 api_generator/rest_specs/eql.get_status.json
 delete mode 100644 api_generator/rest_specs/logstash.delete_pipeline.json
 delete mode 100644 api_generator/rest_specs/logstash.get_pipeline.json
 delete mode 100644 api_generator/rest_specs/logstash.put_pipeline.json
 rename api_generator/rest_specs/{text_structure.find_structure.json => ml.find_file_structure.json} (96%)
 delete mode 100644 api_generator/rest_specs/snapshot.get_features.json
 delete mode 100644 elasticsearch/src/logstash.rs
 delete mode 100644 elasticsearch/src/text_structure.rs

diff --git a/Makefile.toml b/Makefile.toml
index 8702554c..3810c148 100644
--- a/Makefile.toml
+++ b/Makefile.toml
@@ -3,7 +3,7 @@ default_to_workspace = false
 
 [env]
 # Determines the version of Elasticsearch docker container used
-STACK_VERSION = { value = "7.x-SNAPSHOT", condition = { env_not_set = ["STACK_VERSION"] }}
+STACK_VERSION = { value = "7.11.1", condition = { env_not_set = ["STACK_VERSION"] }}
 # Determines the distribution of docker container used. Either platinum or free
 TEST_SUITE = { value = "free", condition = { env_not_set = ["TEST_SUITE"] }}
 # Set publish flags to dry-run by default, to force user to explicitly define for publishing
diff --git a/api_generator/rest_specs/cat.plugins.json b/api_generator/rest_specs/cat.plugins.json
index 48635d2f..6dc5ce05 100644
--- a/api_generator/rest_specs/cat.plugins.json
+++ b/api_generator/rest_specs/cat.plugins.json
@@ -41,11 +41,6 @@
 "description":"Return help information",
 "default":false
 },
- "include_bootstrap":{
- "type":"boolean",
- "description":"Include bootstrap plugins in the response",
- "default":false
- },
 "s":{
 "type":"list",
 "description":"Comma-separated list of column names or column aliases to sort by"
diff --git a/api_generator/rest_specs/eql.get_status.json b/api_generator/rest_specs/eql.get_status.json
deleted file mode 100644
index be8a4398..00000000
--- a/api_generator/rest_specs/eql.get_status.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "eql.get_status": {
- "documentation": {
- "url": "https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html",
- "description": "Returns the status of a previously submitted async or stored Event Query Language (EQL) search"
- },
- "stability": "stable",
- "visibility": "public",
- "headers": {
- "accept": [
- "application/json"
- ]
- },
- "url": {
- "paths": [
- {
- "path": "/_eql/search/status/{id}",
- "methods": [
- "GET"
- ],
- "parts": {
- "id": {
- "type": "string",
- "description": "The async search ID"
- }
- }
- }
- ]
- }
- }
-}
diff --git a/api_generator/rest_specs/indices.close.json b/api_generator/rest_specs/indices.close.json
index 00900819..0738216d 100644
--- a/api_generator/rest_specs/indices.close.json
+++ b/api_generator/rest_specs/indices.close.json
@@ -56,7 +56,7 @@
 },
 "wait_for_active_shards":{
 "type":"string",
- "description":"Sets the number of active shards to wait for before the operation returns. Set to `index-setting` to wait according to the index setting `index.write.wait_for_active_shards`, or `all` to wait for all shards, or an integer. Defaults to `0`."
+ "description":"Sets the number of active shards to wait for before the operation returns."
} } } diff --git a/api_generator/rest_specs/last_downloaded_version b/api_generator/rest_specs/last_downloaded_version index e28dc5ba..e0a0f4c8 100644 --- a/api_generator/rest_specs/last_downloaded_version +++ b/api_generator/rest_specs/last_downloaded_version @@ -1 +1 @@ -7.x \ No newline at end of file +v7.11.1 \ No newline at end of file diff --git a/api_generator/rest_specs/logstash.delete_pipeline.json b/api_generator/rest_specs/logstash.delete_pipeline.json deleted file mode 100644 index 8650f5f7..00000000 --- a/api_generator/rest_specs/logstash.delete_pipeline.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "logstash.delete_pipeline":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-delete-pipeline.html", - "description":"Deletes Logstash Pipelines used by Central Management" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_logstash/pipeline/{id}", - "methods":[ "DELETE" ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the Pipeline" - } - } - } - ] - }, - "params":{} - } -} diff --git a/api_generator/rest_specs/logstash.get_pipeline.json b/api_generator/rest_specs/logstash.get_pipeline.json deleted file mode 100644 index 061e49e0..00000000 --- a/api_generator/rest_specs/logstash.get_pipeline.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "logstash.get_pipeline":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-get-pipeline.html", - "description":"Retrieves Logstash Pipelines used by Central Management" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_logstash/pipeline/{id}", - "methods":[ "GET" ], - "parts":{ - "id":{ - "type":"string", - "description":"A comma-separated list of Pipeline IDs" - } - } - } - ] - }, - "params":{} - } -} diff --git a/api_generator/rest_specs/logstash.put_pipeline.json b/api_generator/rest_specs/logstash.put_pipeline.json deleted file mode 100644 index e8ec9b0d..00000000 --- a/api_generator/rest_specs/logstash.put_pipeline.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "logstash.put_pipeline":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-put-pipeline.html", - "description":"Adds and updates Logstash Pipelines used for Central Management" - }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"], - "content_type": ["application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_logstash/pipeline/{id}", - "methods":[ "PUT" ], - "parts":{ - "id":{ - "type":"string", - "description":"The ID of the Pipeline" - } - } - } - ] - }, - "params":{ - }, - "body":{ - "description":"The Pipeline to add or update", - "required":true - } - } -} diff --git a/api_generator/rest_specs/text_structure.find_structure.json b/api_generator/rest_specs/ml.find_file_structure.json similarity index 96% rename from api_generator/rest_specs/text_structure.find_structure.json rename to api_generator/rest_specs/ml.find_file_structure.json index 934bc785..1eae145d 100644 --- a/api_generator/rest_specs/text_structure.find_structure.json +++ b/api_generator/rest_specs/ml.find_file_structure.json @@ -1,7 +1,7 @@ { - "text_structure.find_structure":{ + "ml.find_file_structure":{ "documentation":{ - 
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/find-structure.html", + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-find-file-structure.html", "description":"Finds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch." }, "stability":"experimental", @@ -13,7 +13,7 @@ "url":{ "paths":[ { - "path":"/_text_structure/find_structure", + "path":"/_ml/find_file_structure", "methods":[ "POST" ] diff --git a/api_generator/rest_specs/search.json b/api_generator/rest_specs/search.json index d127b188..4db4f0e3 100644 --- a/api_generator/rest_specs/search.json +++ b/api_generator/rest_specs/search.json @@ -248,10 +248,6 @@ "type":"boolean", "description":"Indicates whether hits.total should be rendered as an integer or an object in the rest search response", "default":false - }, - "min_compatible_shard_node":{ - "type":"string", - "description":"The minimum compatible version that all shards involved in search should have for this request to be successful" } }, "body":{ diff --git a/api_generator/rest_specs/searchable_snapshots.mount.json b/api_generator/rest_specs/searchable_snapshots.mount.json index 61cff6a6..a7a127fe 100644 --- a/api_generator/rest_specs/searchable_snapshots.mount.json +++ b/api_generator/rest_specs/searchable_snapshots.mount.json @@ -39,11 +39,6 @@ "type":"boolean", "description":"Should this request wait until the operation has completed before returning", "default":false - }, - "storage":{ - "type":"string", - "description":"Selects the kind of local storage used to accelerate searches. Experimental, and defaults to `full_copy`", - "default":false } }, "body":{ diff --git a/api_generator/rest_specs/searchable_snapshots.stats.json b/api_generator/rest_specs/searchable_snapshots.stats.json index 23c6953f..2063d417 100644 --- a/api_generator/rest_specs/searchable_snapshots.stats.json +++ b/api_generator/rest_specs/searchable_snapshots.stats.json @@ -30,18 +30,6 @@ } } ] - }, - "params": { - "level":{ - "type":"enum", - "description":"Return stats aggregated at cluster, index or shard level", - "options":[ - "cluster", - "indices", - "shards" - ], - "default":"indices" - } } } } diff --git a/api_generator/rest_specs/snapshot.get_features.json b/api_generator/rest_specs/snapshot.get_features.json deleted file mode 100644 index 76b340d3..00000000 --- a/api_generator/rest_specs/snapshot.get_features.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "snapshot.get_features":{ - "documentation":{ - "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-snapshots.html", - "description":"Returns a list of features which can be snapshotted in this cluster." 
- }, - "stability":"stable", - "visibility":"public", - "headers":{ - "accept": [ "application/json"] - }, - "url":{ - "paths":[ - { - "path":"/_snapshottable_features", - "methods":[ - "GET" - ] - } - ] - }, - "params":{ - "master_timeout":{ - "type":"time", - "description":"Explicit operation timeout for connection to master node" - } - } - } -} diff --git a/elasticsearch/src/.generated.toml b/elasticsearch/src/.generated.toml index 06ea6570..c083c5c2 100644 --- a/elasticsearch/src/.generated.toml +++ b/elasticsearch/src/.generated.toml @@ -12,7 +12,6 @@ written = [ 'indices.rs', 'ingest.rs', 'license.rs', - 'logstash.rs', 'migration.rs', 'ml.rs', 'monitoring.rs', @@ -26,7 +25,6 @@ written = [ 'sql.rs', 'ssl.rs', 'tasks.rs', - 'text_structure.rs', 'transform.rs', 'watcher.rs', 'xpack.rs', diff --git a/elasticsearch/src/cat.rs b/elasticsearch/src/cat.rs index eae6dbe5..eb73dcc4 100644 --- a/elasticsearch/src/cat.rs +++ b/elasticsearch/src/cat.rs @@ -3041,7 +3041,6 @@ pub struct CatPlugins<'a, 'b> { headers: HeaderMap, help: Option, human: Option, - include_bootstrap: Option, local: Option, master_timeout: Option<&'b str>, pretty: Option, @@ -3066,7 +3065,6 @@ impl<'a, 'b> CatPlugins<'a, 'b> { h: None, help: None, human: None, - include_bootstrap: None, local: None, master_timeout: None, pretty: None, @@ -3111,11 +3109,6 @@ impl<'a, 'b> CatPlugins<'a, 'b> { self.human = Some(human); self } - #[doc = "Include bootstrap plugins in the response"] - pub fn include_bootstrap(mut self, include_bootstrap: bool) -> Self { - self.include_bootstrap = Some(include_bootstrap); - self - } #[doc = "Return local information, do not retrieve the state from master node (default: false)"] pub fn local(mut self, local: bool) -> Self { self.local = Some(local); @@ -3169,7 +3162,6 @@ impl<'a, 'b> CatPlugins<'a, 'b> { h: Option<&'b [&'b str]>, help: Option, human: Option, - include_bootstrap: Option, local: Option, master_timeout: Option<&'b str>, pretty: Option, @@ -3185,7 +3177,6 @@ impl<'a, 'b> CatPlugins<'a, 'b> { h: self.h, help: self.help, human: self.human, - include_bootstrap: self.include_bootstrap, local: self.local, master_timeout: self.master_timeout, pretty: self.pretty, diff --git a/elasticsearch/src/eql.rs b/elasticsearch/src/eql.rs index 7c17d4f2..eb44a8d1 100644 --- a/elasticsearch/src/eql.rs +++ b/elasticsearch/src/eql.rs @@ -302,124 +302,6 @@ impl<'a, 'b> EqlGet<'a, 'b> { } } #[derive(Debug, Clone, PartialEq)] -#[doc = "API parts for the Eql Get Status API"] -pub enum EqlGetStatusParts<'b> { - #[doc = "Id"] - Id(&'b str), -} -impl<'b> EqlGetStatusParts<'b> { - #[doc = "Builds a relative URL path to the Eql Get Status API"] - pub fn url(self) -> Cow<'static, str> { - match self { - EqlGetStatusParts::Id(ref id) => { - let encoded_id: Cow = percent_encode(id.as_bytes(), PARTS_ENCODED).into(); - let mut p = String::with_capacity(20usize + encoded_id.len()); - p.push_str("/_eql/search/status/"); - p.push_str(encoded_id.as_ref()); - p.into() - } - } - } -} -#[doc = "Builder for the [Eql Get Status API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/eql-search-api.html)\n\nReturns the status of a previously submitted async or stored Event Query Language (EQL) search"] -#[derive(Clone, Debug)] -pub struct EqlGetStatus<'a, 'b> { - transport: &'a Transport, - parts: EqlGetStatusParts<'b>, - error_trace: Option, - filter_path: Option<&'b [&'b str]>, - headers: HeaderMap, - human: Option, - pretty: Option, - request_timeout: Option, - source: Option<&'b str>, -} -impl<'a, 'b> 
EqlGetStatus<'a, 'b> { - #[doc = "Creates a new instance of [EqlGetStatus] with the specified API parts"] - pub fn new(transport: &'a Transport, parts: EqlGetStatusParts<'b>) -> Self { - let headers = HeaderMap::new(); - EqlGetStatus { - transport, - parts, - headers, - error_trace: None, - filter_path: None, - human: None, - pretty: None, - request_timeout: None, - source: None, - } - } - #[doc = "Include the stack trace of returned errors."] - pub fn error_trace(mut self, error_trace: bool) -> Self { - self.error_trace = Some(error_trace); - self - } - #[doc = "A comma-separated list of filters used to reduce the response."] - pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { - self.filter_path = Some(filter_path); - self - } - #[doc = "Adds a HTTP header"] - pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { - self.headers.insert(key, value); - self - } - #[doc = "Return human readable values for statistics."] - pub fn human(mut self, human: bool) -> Self { - self.human = Some(human); - self - } - #[doc = "Pretty format the returned JSON response."] - pub fn pretty(mut self, pretty: bool) -> Self { - self.pretty = Some(pretty); - self - } - #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."] - pub fn request_timeout(mut self, timeout: Duration) -> Self { - self.request_timeout = Some(timeout); - self - } - #[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."] - pub fn source(mut self, source: &'b str) -> Self { - self.source = Some(source); - self - } - #[doc = "Creates an asynchronous call to the Eql Get Status API that can be awaited"] - pub async fn send(self) -> Result { - let path = self.parts.url(); - let method = Method::Get; - let headers = self.headers; - let timeout = self.request_timeout; - let query_string = { - #[serde_with::skip_serializing_none] - #[derive(Serialize)] - struct QueryParams<'b> { - error_trace: Option, - #[serde(serialize_with = "crate::client::serialize_coll_qs")] - filter_path: Option<&'b [&'b str]>, - human: Option, - pretty: Option, - source: Option<&'b str>, - } - let query_params = QueryParams { - error_trace: self.error_trace, - filter_path: self.filter_path, - human: self.human, - pretty: self.pretty, - source: self.source, - }; - Some(query_params) - }; - let body = Option::<()>::None; - let response = self - .transport - .send(method, &path, headers, query_string.as_ref(), body, timeout) - .await?; - Ok(response) - } -} -#[derive(Debug, Clone, PartialEq)] #[doc = "API parts for the Eql Search API"] pub enum EqlSearchParts<'b> { #[doc = "Index"] @@ -615,10 +497,6 @@ impl<'a> Eql<'a> { pub fn get<'b>(&'a self, parts: EqlGetParts<'b>) -> EqlGet<'a, 'b> { EqlGet::new(self.transport(), parts) } - #[doc = "[Eql Get Status API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/eql-search-api.html)\n\nReturns the status of a previously submitted async or stored Event Query Language (EQL) search"] - pub fn get_status<'b>(&'a self, parts: EqlGetStatusParts<'b>) -> EqlGetStatus<'a, 'b> { - EqlGetStatus::new(self.transport(), parts) - } #[doc = "[Eql Search API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/eql-search-api.html)\n\nReturns results matching a query expressed in Event Query Language (EQL)"] pub fn search<'b>(&'a self, parts: EqlSearchParts<'b>) -> EqlSearch<'a, 'b, ()> { EqlSearch::new(self.transport(), parts) 
diff --git a/elasticsearch/src/indices.rs b/elasticsearch/src/indices.rs index ce9c27fb..98039d81 100644 --- a/elasticsearch/src/indices.rs +++ b/elasticsearch/src/indices.rs @@ -963,7 +963,7 @@ where self.timeout = Some(timeout); self } - #[doc = "Sets the number of active shards to wait for before the operation returns. Set to `index-setting` to wait according to the index setting `index.write.wait_for_active_shards`, or `all` to wait for all shards, or an integer. Defaults to `0`."] + #[doc = "Sets the number of active shards to wait for before the operation returns."] pub fn wait_for_active_shards(mut self, wait_for_active_shards: &'b str) -> Self { self.wait_for_active_shards = Some(wait_for_active_shards); self diff --git a/elasticsearch/src/lib.rs b/elasticsearch/src/lib.rs index 6f03ffd1..dd8b9fd5 100644 --- a/elasticsearch/src/lib.rs +++ b/elasticsearch/src/lib.rs @@ -374,7 +374,6 @@ pub mod ilm; pub mod indices; pub mod ingest; pub mod license; -pub mod logstash; pub mod migration; pub mod ml; pub mod monitoring; @@ -387,7 +386,6 @@ pub mod snapshot; pub mod sql; pub mod ssl; pub mod tasks; -pub mod text_structure; pub mod transform; pub mod watcher; pub mod xpack; diff --git a/elasticsearch/src/logstash.rs b/elasticsearch/src/logstash.rs deleted file mode 100644 index de45d945..00000000 --- a/elasticsearch/src/logstash.rs +++ /dev/null @@ -1,464 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// ----------------------------------------------- -// This file is generated, Please do not edit it manually. -// Run the following in the root of the repo to regenerate: -// -// cargo make generate-api -// ----------------------------------------------- - -//! Logstash APIs -//! -//! The [Logstash APIs](https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-apis.html) are used to -//! manage pipelines used by Logstash Central Management. 
- -#![allow(unused_imports)] -use crate::{ - client::Elasticsearch, - error::Error, - http::{ - headers::{HeaderMap, HeaderName, HeaderValue, ACCEPT, CONTENT_TYPE}, - request::{Body, JsonBody, NdBody, PARTS_ENCODED}, - response::Response, - transport::Transport, - Method, - }, - params::*, -}; -use percent_encoding::percent_encode; -use serde::Serialize; -use std::{borrow::Cow, time::Duration}; -#[derive(Debug, Clone, PartialEq)] -#[doc = "API parts for the Logstash Delete Pipeline API"] -pub enum LogstashDeletePipelineParts<'b> { - #[doc = "Id"] - Id(&'b str), -} -impl<'b> LogstashDeletePipelineParts<'b> { - #[doc = "Builds a relative URL path to the Logstash Delete Pipeline API"] - pub fn url(self) -> Cow<'static, str> { - match self { - LogstashDeletePipelineParts::Id(ref id) => { - let encoded_id: Cow = percent_encode(id.as_bytes(), PARTS_ENCODED).into(); - let mut p = String::with_capacity(20usize + encoded_id.len()); - p.push_str("/_logstash/pipeline/"); - p.push_str(encoded_id.as_ref()); - p.into() - } - } - } -} -#[doc = "Builder for the [Logstash Delete Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-delete-pipeline.html)\n\nDeletes Logstash Pipelines used by Central Management"] -#[derive(Clone, Debug)] -pub struct LogstashDeletePipeline<'a, 'b> { - transport: &'a Transport, - parts: LogstashDeletePipelineParts<'b>, - error_trace: Option, - filter_path: Option<&'b [&'b str]>, - headers: HeaderMap, - human: Option, - pretty: Option, - request_timeout: Option, - source: Option<&'b str>, -} -impl<'a, 'b> LogstashDeletePipeline<'a, 'b> { - #[doc = "Creates a new instance of [LogstashDeletePipeline] with the specified API parts"] - pub fn new(transport: &'a Transport, parts: LogstashDeletePipelineParts<'b>) -> Self { - let headers = HeaderMap::new(); - LogstashDeletePipeline { - transport, - parts, - headers, - error_trace: None, - filter_path: None, - human: None, - pretty: None, - request_timeout: None, - source: None, - } - } - #[doc = "Include the stack trace of returned errors."] - pub fn error_trace(mut self, error_trace: bool) -> Self { - self.error_trace = Some(error_trace); - self - } - #[doc = "A comma-separated list of filters used to reduce the response."] - pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { - self.filter_path = Some(filter_path); - self - } - #[doc = "Adds a HTTP header"] - pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { - self.headers.insert(key, value); - self - } - #[doc = "Return human readable values for statistics."] - pub fn human(mut self, human: bool) -> Self { - self.human = Some(human); - self - } - #[doc = "Pretty format the returned JSON response."] - pub fn pretty(mut self, pretty: bool) -> Self { - self.pretty = Some(pretty); - self - } - #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."] - pub fn request_timeout(mut self, timeout: Duration) -> Self { - self.request_timeout = Some(timeout); - self - } - #[doc = "The URL-encoded request definition. 
Useful for libraries that do not accept a request body for non-POST requests."] - pub fn source(mut self, source: &'b str) -> Self { - self.source = Some(source); - self - } - #[doc = "Creates an asynchronous call to the Logstash Delete Pipeline API that can be awaited"] - pub async fn send(self) -> Result { - let path = self.parts.url(); - let method = Method::Delete; - let headers = self.headers; - let timeout = self.request_timeout; - let query_string = { - #[serde_with::skip_serializing_none] - #[derive(Serialize)] - struct QueryParams<'b> { - error_trace: Option, - #[serde(serialize_with = "crate::client::serialize_coll_qs")] - filter_path: Option<&'b [&'b str]>, - human: Option, - pretty: Option, - source: Option<&'b str>, - } - let query_params = QueryParams { - error_trace: self.error_trace, - filter_path: self.filter_path, - human: self.human, - pretty: self.pretty, - source: self.source, - }; - Some(query_params) - }; - let body = Option::<()>::None; - let response = self - .transport - .send(method, &path, headers, query_string.as_ref(), body, timeout) - .await?; - Ok(response) - } -} -#[derive(Debug, Clone, PartialEq)] -#[doc = "API parts for the Logstash Get Pipeline API"] -pub enum LogstashGetPipelineParts<'b> { - #[doc = "Id"] - Id(&'b str), -} -impl<'b> LogstashGetPipelineParts<'b> { - #[doc = "Builds a relative URL path to the Logstash Get Pipeline API"] - pub fn url(self) -> Cow<'static, str> { - match self { - LogstashGetPipelineParts::Id(ref id) => { - let encoded_id: Cow = percent_encode(id.as_bytes(), PARTS_ENCODED).into(); - let mut p = String::with_capacity(20usize + encoded_id.len()); - p.push_str("/_logstash/pipeline/"); - p.push_str(encoded_id.as_ref()); - p.into() - } - } - } -} -#[doc = "Builder for the [Logstash Get Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-get-pipeline.html)\n\nRetrieves Logstash Pipelines used by Central Management"] -#[derive(Clone, Debug)] -pub struct LogstashGetPipeline<'a, 'b> { - transport: &'a Transport, - parts: LogstashGetPipelineParts<'b>, - error_trace: Option, - filter_path: Option<&'b [&'b str]>, - headers: HeaderMap, - human: Option, - pretty: Option, - request_timeout: Option, - source: Option<&'b str>, -} -impl<'a, 'b> LogstashGetPipeline<'a, 'b> { - #[doc = "Creates a new instance of [LogstashGetPipeline] with the specified API parts"] - pub fn new(transport: &'a Transport, parts: LogstashGetPipelineParts<'b>) -> Self { - let headers = HeaderMap::new(); - LogstashGetPipeline { - transport, - parts, - headers, - error_trace: None, - filter_path: None, - human: None, - pretty: None, - request_timeout: None, - source: None, - } - } - #[doc = "Include the stack trace of returned errors."] - pub fn error_trace(mut self, error_trace: bool) -> Self { - self.error_trace = Some(error_trace); - self - } - #[doc = "A comma-separated list of filters used to reduce the response."] - pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { - self.filter_path = Some(filter_path); - self - } - #[doc = "Adds a HTTP header"] - pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { - self.headers.insert(key, value); - self - } - #[doc = "Return human readable values for statistics."] - pub fn human(mut self, human: bool) -> Self { - self.human = Some(human); - self - } - #[doc = "Pretty format the returned JSON response."] - pub fn pretty(mut self, pretty: bool) -> Self { - self.pretty = Some(pretty); - self - } - #[doc = "Sets a request timeout for this API call.\n\nThe 
timeout is applied from when the request starts connecting until the response body has finished."] - pub fn request_timeout(mut self, timeout: Duration) -> Self { - self.request_timeout = Some(timeout); - self - } - #[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."] - pub fn source(mut self, source: &'b str) -> Self { - self.source = Some(source); - self - } - #[doc = "Creates an asynchronous call to the Logstash Get Pipeline API that can be awaited"] - pub async fn send(self) -> Result { - let path = self.parts.url(); - let method = Method::Get; - let headers = self.headers; - let timeout = self.request_timeout; - let query_string = { - #[serde_with::skip_serializing_none] - #[derive(Serialize)] - struct QueryParams<'b> { - error_trace: Option, - #[serde(serialize_with = "crate::client::serialize_coll_qs")] - filter_path: Option<&'b [&'b str]>, - human: Option, - pretty: Option, - source: Option<&'b str>, - } - let query_params = QueryParams { - error_trace: self.error_trace, - filter_path: self.filter_path, - human: self.human, - pretty: self.pretty, - source: self.source, - }; - Some(query_params) - }; - let body = Option::<()>::None; - let response = self - .transport - .send(method, &path, headers, query_string.as_ref(), body, timeout) - .await?; - Ok(response) - } -} -#[derive(Debug, Clone, PartialEq)] -#[doc = "API parts for the Logstash Put Pipeline API"] -pub enum LogstashPutPipelineParts<'b> { - #[doc = "Id"] - Id(&'b str), -} -impl<'b> LogstashPutPipelineParts<'b> { - #[doc = "Builds a relative URL path to the Logstash Put Pipeline API"] - pub fn url(self) -> Cow<'static, str> { - match self { - LogstashPutPipelineParts::Id(ref id) => { - let encoded_id: Cow = percent_encode(id.as_bytes(), PARTS_ENCODED).into(); - let mut p = String::with_capacity(20usize + encoded_id.len()); - p.push_str("/_logstash/pipeline/"); - p.push_str(encoded_id.as_ref()); - p.into() - } - } - } -} -#[doc = "Builder for the [Logstash Put Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-put-pipeline.html)\n\nAdds and updates Logstash Pipelines used for Central Management"] -#[derive(Clone, Debug)] -pub struct LogstashPutPipeline<'a, 'b, B> { - transport: &'a Transport, - parts: LogstashPutPipelineParts<'b>, - body: Option, - error_trace: Option, - filter_path: Option<&'b [&'b str]>, - headers: HeaderMap, - human: Option, - pretty: Option, - request_timeout: Option, - source: Option<&'b str>, -} -impl<'a, 'b, B> LogstashPutPipeline<'a, 'b, B> -where - B: Body, -{ - #[doc = "Creates a new instance of [LogstashPutPipeline] with the specified API parts"] - pub fn new(transport: &'a Transport, parts: LogstashPutPipelineParts<'b>) -> Self { - let headers = HeaderMap::new(); - LogstashPutPipeline { - transport, - parts, - headers, - body: None, - error_trace: None, - filter_path: None, - human: None, - pretty: None, - request_timeout: None, - source: None, - } - } - #[doc = "The body for the API call"] - pub fn body(self, body: T) -> LogstashPutPipeline<'a, 'b, JsonBody> - where - T: Serialize, - { - LogstashPutPipeline { - transport: self.transport, - parts: self.parts, - body: Some(body.into()), - error_trace: self.error_trace, - filter_path: self.filter_path, - headers: self.headers, - human: self.human, - pretty: self.pretty, - request_timeout: self.request_timeout, - source: self.source, - } - } - #[doc = "Include the stack trace of returned errors."] - pub fn error_trace(mut self, error_trace: 
bool) -> Self { - self.error_trace = Some(error_trace); - self - } - #[doc = "A comma-separated list of filters used to reduce the response."] - pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { - self.filter_path = Some(filter_path); - self - } - #[doc = "Adds a HTTP header"] - pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { - self.headers.insert(key, value); - self - } - #[doc = "Return human readable values for statistics."] - pub fn human(mut self, human: bool) -> Self { - self.human = Some(human); - self - } - #[doc = "Pretty format the returned JSON response."] - pub fn pretty(mut self, pretty: bool) -> Self { - self.pretty = Some(pretty); - self - } - #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."] - pub fn request_timeout(mut self, timeout: Duration) -> Self { - self.request_timeout = Some(timeout); - self - } - #[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."] - pub fn source(mut self, source: &'b str) -> Self { - self.source = Some(source); - self - } - #[doc = "Creates an asynchronous call to the Logstash Put Pipeline API that can be awaited"] - pub async fn send(self) -> Result { - let path = self.parts.url(); - let method = Method::Put; - let headers = self.headers; - let timeout = self.request_timeout; - let query_string = { - #[serde_with::skip_serializing_none] - #[derive(Serialize)] - struct QueryParams<'b> { - error_trace: Option, - #[serde(serialize_with = "crate::client::serialize_coll_qs")] - filter_path: Option<&'b [&'b str]>, - human: Option, - pretty: Option, - source: Option<&'b str>, - } - let query_params = QueryParams { - error_trace: self.error_trace, - filter_path: self.filter_path, - human: self.human, - pretty: self.pretty, - source: self.source, - }; - Some(query_params) - }; - let body = self.body; - let response = self - .transport - .send(method, &path, headers, query_string.as_ref(), body, timeout) - .await?; - Ok(response) - } -} -#[doc = "Namespace client for Logstash APIs"] -pub struct Logstash<'a> { - transport: &'a Transport, -} -impl<'a> Logstash<'a> { - #[doc = "Creates a new instance of [Logstash]"] - pub fn new(transport: &'a Transport) -> Self { - Self { transport } - } - pub fn transport(&self) -> &Transport { - self.transport - } - #[doc = "[Logstash Delete Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-delete-pipeline.html)\n\nDeletes Logstash Pipelines used by Central Management"] - pub fn delete_pipeline<'b>( - &'a self, - parts: LogstashDeletePipelineParts<'b>, - ) -> LogstashDeletePipeline<'a, 'b> { - LogstashDeletePipeline::new(self.transport(), parts) - } - #[doc = "[Logstash Get Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-get-pipeline.html)\n\nRetrieves Logstash Pipelines used by Central Management"] - pub fn get_pipeline<'b>( - &'a self, - parts: LogstashGetPipelineParts<'b>, - ) -> LogstashGetPipeline<'a, 'b> { - LogstashGetPipeline::new(self.transport(), parts) - } - #[doc = "[Logstash Put Pipeline API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/logstash-api-put-pipeline.html)\n\nAdds and updates Logstash Pipelines used for Central Management"] - pub fn put_pipeline<'b>( - &'a self, - parts: LogstashPutPipelineParts<'b>, - ) -> LogstashPutPipeline<'a, 'b, ()> { - LogstashPutPipeline::new(self.transport(), 
parts) - } -} -impl Elasticsearch { - #[doc = "Creates a namespace client for Logstash APIs"] - pub fn logstash(&self) -> Logstash { - Logstash::new(self.transport()) - } -} diff --git a/elasticsearch/src/ml.rs b/elasticsearch/src/ml.rs index 150ba5dd..318759bf 100644 --- a/elasticsearch/src/ml.rs +++ b/elasticsearch/src/ml.rs @@ -2119,6 +2119,287 @@ where Ok(response) } } +#[cfg(feature = "experimental-apis")] +#[derive(Debug, Clone, PartialEq)] +#[doc = "API parts for the Ml Find File Structure API"] +pub enum MlFindFileStructureParts { + #[doc = "No parts"] + None, +} +#[cfg(feature = "experimental-apis")] +impl MlFindFileStructureParts { + #[doc = "Builds a relative URL path to the Ml Find File Structure API"] + pub fn url(self) -> Cow<'static, str> { + match self { + MlFindFileStructureParts::None => "/_ml/find_file_structure".into(), + } + } +} +#[doc = "Builder for the [Ml Find File Structure API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/ml-find-file-structure.html)\n\nFinds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch."] +#[doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. Can have breaking changes in future\nversions or might even be removed entirely.\n "] +#[cfg(feature = "experimental-apis")] +#[derive(Clone, Debug)] +pub struct MlFindFileStructure<'a, 'b, B> { + transport: &'a Transport, + parts: MlFindFileStructureParts, + body: Option, + charset: Option<&'b str>, + column_names: Option<&'b [&'b str]>, + delimiter: Option<&'b str>, + error_trace: Option, + explain: Option, + filter_path: Option<&'b [&'b str]>, + format: Option, + grok_pattern: Option<&'b str>, + has_header_row: Option, + headers: HeaderMap, + human: Option, + line_merge_size_limit: Option, + lines_to_sample: Option, + pretty: Option, + quote: Option<&'b str>, + request_timeout: Option, + should_trim_fields: Option, + source: Option<&'b str>, + timeout: Option<&'b str>, + timestamp_field: Option<&'b str>, + timestamp_format: Option<&'b str>, +} +#[cfg(feature = "experimental-apis")] +impl<'a, 'b, B> MlFindFileStructure<'a, 'b, B> +where + B: Body, +{ + #[doc = "Creates a new instance of [MlFindFileStructure]"] + pub fn new(transport: &'a Transport) -> Self { + let headers = HeaderMap::new(); + MlFindFileStructure { + transport, + parts: MlFindFileStructureParts::None, + headers, + body: None, + charset: None, + column_names: None, + delimiter: None, + error_trace: None, + explain: None, + filter_path: None, + format: None, + grok_pattern: None, + has_header_row: None, + human: None, + line_merge_size_limit: None, + lines_to_sample: None, + pretty: None, + quote: None, + request_timeout: None, + should_trim_fields: None, + source: None, + timeout: None, + timestamp_field: None, + timestamp_format: None, + } + } + #[doc = "The body for the API call"] + pub fn body(self, body: Vec) -> MlFindFileStructure<'a, 'b, NdBody> + where + T: Body, + { + MlFindFileStructure { + transport: self.transport, + parts: self.parts, + body: Some(NdBody(body)), + charset: self.charset, + column_names: self.column_names, + delimiter: self.delimiter, + error_trace: self.error_trace, + explain: self.explain, + filter_path: self.filter_path, + format: self.format, + grok_pattern: self.grok_pattern, + has_header_row: self.has_header_row, + headers: self.headers, + human: self.human, + line_merge_size_limit: self.line_merge_size_limit, + lines_to_sample: self.lines_to_sample, + pretty: self.pretty, + quote: self.quote, 
+ request_timeout: self.request_timeout, + should_trim_fields: self.should_trim_fields, + source: self.source, + timeout: self.timeout, + timestamp_field: self.timestamp_field, + timestamp_format: self.timestamp_format, + } + } + #[doc = "Optional parameter to specify the character set of the file"] + pub fn charset(mut self, charset: &'b str) -> Self { + self.charset = Some(charset); + self + } + #[doc = "Optional parameter containing a comma separated list of the column names for a delimited file"] + pub fn column_names(mut self, column_names: &'b [&'b str]) -> Self { + self.column_names = Some(column_names); + self + } + #[doc = "Optional parameter to specify the delimiter character for a delimited file - must be a single character"] + pub fn delimiter(mut self, delimiter: &'b str) -> Self { + self.delimiter = Some(delimiter); + self + } + #[doc = "Include the stack trace of returned errors."] + pub fn error_trace(mut self, error_trace: bool) -> Self { + self.error_trace = Some(error_trace); + self + } + #[doc = "Whether to include a commentary on how the structure was derived"] + pub fn explain(mut self, explain: bool) -> Self { + self.explain = Some(explain); + self + } + #[doc = "A comma-separated list of filters used to reduce the response."] + pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { + self.filter_path = Some(filter_path); + self + } + #[doc = "Optional parameter to specify the high level file format"] + pub fn format(mut self, format: Format) -> Self { + self.format = Some(format); + self + } + #[doc = "Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi-structured text file"] + pub fn grok_pattern(mut self, grok_pattern: &'b str) -> Self { + self.grok_pattern = Some(grok_pattern); + self + } + #[doc = "Optional parameter to specify whether a delimited file includes the column names in its first row"] + pub fn has_header_row(mut self, has_header_row: bool) -> Self { + self.has_header_row = Some(has_header_row); + self + } + #[doc = "Adds a HTTP header"] + pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { + self.headers.insert(key, value); + self + } + #[doc = "Return human readable values for statistics."] + pub fn human(mut self, human: bool) -> Self { + self.human = Some(human); + self + } + #[doc = "Maximum number of characters permitted in a single message when lines are merged to create messages."] + pub fn line_merge_size_limit(mut self, line_merge_size_limit: i32) -> Self { + self.line_merge_size_limit = Some(line_merge_size_limit); + self + } + #[doc = "How many lines of the file should be included in the analysis"] + pub fn lines_to_sample(mut self, lines_to_sample: i32) -> Self { + self.lines_to_sample = Some(lines_to_sample); + self + } + #[doc = "Pretty format the returned JSON response."] + pub fn pretty(mut self, pretty: bool) -> Self { + self.pretty = Some(pretty); + self + } + #[doc = "Optional parameter to specify the quote character for a delimited file - must be a single character"] + pub fn quote(mut self, quote: &'b str) -> Self { + self.quote = Some(quote); + self + } + #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."] + pub fn request_timeout(mut self, timeout: Duration) -> Self { + self.request_timeout = Some(timeout); + self + } + #[doc = "Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace 
trimmed from them"] + pub fn should_trim_fields(mut self, should_trim_fields: bool) -> Self { + self.should_trim_fields = Some(should_trim_fields); + self + } + #[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."] + pub fn source(mut self, source: &'b str) -> Self { + self.source = Some(source); + self + } + #[doc = "Timeout after which the analysis will be aborted"] + pub fn timeout(mut self, timeout: &'b str) -> Self { + self.timeout = Some(timeout); + self + } + #[doc = "Optional parameter to specify the timestamp field in the file"] + pub fn timestamp_field(mut self, timestamp_field: &'b str) -> Self { + self.timestamp_field = Some(timestamp_field); + self + } + #[doc = "Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format"] + pub fn timestamp_format(mut self, timestamp_format: &'b str) -> Self { + self.timestamp_format = Some(timestamp_format); + self + } + #[doc = "Creates an asynchronous call to the Ml Find File Structure API that can be awaited"] + pub async fn send(self) -> Result { + let path = self.parts.url(); + let method = Method::Post; + let headers = self.headers; + let timeout = self.request_timeout; + let query_string = { + #[serde_with::skip_serializing_none] + #[derive(Serialize)] + struct QueryParams<'b> { + charset: Option<&'b str>, + #[serde(serialize_with = "crate::client::serialize_coll_qs")] + column_names: Option<&'b [&'b str]>, + delimiter: Option<&'b str>, + error_trace: Option, + explain: Option, + #[serde(serialize_with = "crate::client::serialize_coll_qs")] + filter_path: Option<&'b [&'b str]>, + format: Option, + grok_pattern: Option<&'b str>, + has_header_row: Option, + human: Option, + line_merge_size_limit: Option, + lines_to_sample: Option, + pretty: Option, + quote: Option<&'b str>, + should_trim_fields: Option, + source: Option<&'b str>, + timeout: Option<&'b str>, + timestamp_field: Option<&'b str>, + timestamp_format: Option<&'b str>, + } + let query_params = QueryParams { + charset: self.charset, + column_names: self.column_names, + delimiter: self.delimiter, + error_trace: self.error_trace, + explain: self.explain, + filter_path: self.filter_path, + format: self.format, + grok_pattern: self.grok_pattern, + has_header_row: self.has_header_row, + human: self.human, + line_merge_size_limit: self.line_merge_size_limit, + lines_to_sample: self.lines_to_sample, + pretty: self.pretty, + quote: self.quote, + should_trim_fields: self.should_trim_fields, + source: self.source, + timeout: self.timeout, + timestamp_field: self.timestamp_field, + timestamp_format: self.timestamp_format, + }; + Some(query_params) + }; + let body = self.body; + let response = self + .transport + .send(method, &path, headers, query_string.as_ref(), body, timeout) + .await?; + Ok(response) + } +} #[derive(Debug, Clone, PartialEq)] #[doc = "API parts for the Ml Flush Job API"] pub enum MlFlushJobParts<'b> { @@ -9612,6 +9893,12 @@ impl<'a> Ml<'a> { ) -> MlExplainDataFrameAnalytics<'a, 'b, ()> { MlExplainDataFrameAnalytics::new(self.transport(), parts) } + #[doc = "[Ml Find File Structure API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/ml-find-file-structure.html)\n\nFinds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch."] + #[doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. 
Can have breaking changes in future\nversions or might even be removed entirely.\n "] + #[cfg(feature = "experimental-apis")] + pub fn find_file_structure<'b>(&'a self) -> MlFindFileStructure<'a, 'b, ()> { + MlFindFileStructure::new(self.transport()) + } #[doc = "[Ml Flush Job API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/ml-flush-job.html)\n\nForces any buffered data to be processed by the job."] pub fn flush_job<'b>(&'a self, parts: MlFlushJobParts<'b>) -> MlFlushJob<'a, 'b, ()> { MlFlushJob::new(self.transport(), parts) diff --git a/elasticsearch/src/root/mod.rs b/elasticsearch/src/root/mod.rs index 75bfbd2a..40773a9d 100644 --- a/elasticsearch/src/root/mod.rs +++ b/elasticsearch/src/root/mod.rs @@ -6888,7 +6888,6 @@ pub struct Search<'a, 'b, B> { ignore_unavailable: Option, lenient: Option, max_concurrent_shard_requests: Option, - min_compatible_shard_node: Option<&'b str>, pre_filter_shard_size: Option, preference: Option<&'b str>, pretty: Option, @@ -6950,7 +6949,6 @@ where ignore_unavailable: None, lenient: None, max_concurrent_shard_requests: None, - min_compatible_shard_node: None, pre_filter_shard_size: None, preference: None, pretty: None, @@ -7051,7 +7049,6 @@ where ignore_unavailable: self.ignore_unavailable, lenient: self.lenient, max_concurrent_shard_requests: self.max_concurrent_shard_requests, - min_compatible_shard_node: self.min_compatible_shard_node, pre_filter_shard_size: self.pre_filter_shard_size, preference: self.preference, pretty: self.pretty, @@ -7155,11 +7152,6 @@ where self.max_concurrent_shard_requests = Some(max_concurrent_shard_requests); self } - #[doc = "The minimum compatible version that all shards involved in search should have for this request to be successful"] - pub fn min_compatible_shard_node(mut self, min_compatible_shard_node: &'b str) -> Self { - self.min_compatible_shard_node = Some(min_compatible_shard_node); - self - } #[doc = "A threshold that enforces a pre-filter roundtrip to prefilter search shards based on query rewriting if the\u{a0}number of shards the search request expands to exceeds the threshold. This filter roundtrip can limit the number of shards significantly if for instance a shard can not match any documents based on its rewrite method ie. 
if date filters are mandatory to match but the shard bounds and the query are disjoint."] pub fn pre_filter_shard_size(mut self, pre_filter_shard_size: i64) -> Self { self.pre_filter_shard_size = Some(pre_filter_shard_size); @@ -7331,7 +7323,6 @@ where ignore_unavailable: Option, lenient: Option, max_concurrent_shard_requests: Option, - min_compatible_shard_node: Option<&'b str>, pre_filter_shard_size: Option, preference: Option<&'b str>, pretty: Option, @@ -7385,7 +7376,6 @@ where ignore_unavailable: self.ignore_unavailable, lenient: self.lenient, max_concurrent_shard_requests: self.max_concurrent_shard_requests, - min_compatible_shard_node: self.min_compatible_shard_node, pre_filter_shard_size: self.pre_filter_shard_size, preference: self.preference, pretty: self.pretty, diff --git a/elasticsearch/src/searchable_snapshots.rs b/elasticsearch/src/searchable_snapshots.rs index 951a7577..f4344739 100644 --- a/elasticsearch/src/searchable_snapshots.rs +++ b/elasticsearch/src/searchable_snapshots.rs @@ -289,7 +289,6 @@ pub struct SearchableSnapshotsMount<'a, 'b, B> { pretty: Option, request_timeout: Option, source: Option<&'b str>, - storage: Option<&'b str>, wait_for_completion: Option, } #[cfg(feature = "experimental-apis")] @@ -312,7 +311,6 @@ where pretty: None, request_timeout: None, source: None, - storage: None, wait_for_completion: None, } } @@ -333,7 +331,6 @@ where pretty: self.pretty, request_timeout: self.request_timeout, source: self.source, - storage: self.storage, wait_for_completion: self.wait_for_completion, } } @@ -377,11 +374,6 @@ where self.source = Some(source); self } - #[doc = "Selects the kind of local storage used to accelerate searches. Experimental, and defaults to `full_copy`"] - pub fn storage(mut self, storage: &'b str) -> Self { - self.storage = Some(storage); - self - } #[doc = "Should this request wait until the operation has completed before returning"] pub fn wait_for_completion(mut self, wait_for_completion: bool) -> Self { self.wait_for_completion = Some(wait_for_completion); @@ -404,7 +396,6 @@ where master_timeout: Option<&'b str>, pretty: Option, source: Option<&'b str>, - storage: Option<&'b str>, wait_for_completion: Option, } let query_params = QueryParams { @@ -414,7 +405,6 @@ where master_timeout: self.master_timeout, pretty: self.pretty, source: self.source, - storage: self.storage, wait_for_completion: self.wait_for_completion, }; Some(query_params) @@ -594,7 +584,6 @@ pub struct SearchableSnapshotsStats<'a, 'b> { filter_path: Option<&'b [&'b str]>, headers: HeaderMap, human: Option, - level: Option, pretty: Option, request_timeout: Option, source: Option<&'b str>, @@ -611,7 +600,6 @@ impl<'a, 'b> SearchableSnapshotsStats<'a, 'b> { error_trace: None, filter_path: None, human: None, - level: None, pretty: None, request_timeout: None, source: None, @@ -637,11 +625,6 @@ impl<'a, 'b> SearchableSnapshotsStats<'a, 'b> { self.human = Some(human); self } - #[doc = "Return stats aggregated at cluster, index or shard level"] - pub fn level(mut self, level: Level) -> Self { - self.level = Some(level); - self - } #[doc = "Pretty format the returned JSON response."] pub fn pretty(mut self, pretty: bool) -> Self { self.pretty = Some(pretty); @@ -671,7 +654,6 @@ impl<'a, 'b> SearchableSnapshotsStats<'a, 'b> { #[serde(serialize_with = "crate::client::serialize_coll_qs")] filter_path: Option<&'b [&'b str]>, human: Option, - level: Option, pretty: Option, source: Option<&'b str>, } @@ -679,7 +661,6 @@ impl<'a, 'b> SearchableSnapshotsStats<'a, 'b> { error_trace: 
self.error_trace, filter_path: self.filter_path, human: self.human, - level: self.level, pretty: self.pretty, source: self.source, }; diff --git a/elasticsearch/src/snapshot.rs b/elasticsearch/src/snapshot.rs index 19fda9a9..c2e7561b 100644 --- a/elasticsearch/src/snapshot.rs +++ b/elasticsearch/src/snapshot.rs @@ -1145,127 +1145,6 @@ impl<'a, 'b> SnapshotGet<'a, 'b> { } } #[derive(Debug, Clone, PartialEq)] -#[doc = "API parts for the Snapshot Get Features API"] -pub enum SnapshotGetFeaturesParts { - #[doc = "No parts"] - None, -} -impl SnapshotGetFeaturesParts { - #[doc = "Builds a relative URL path to the Snapshot Get Features API"] - pub fn url(self) -> Cow<'static, str> { - match self { - SnapshotGetFeaturesParts::None => "/_snapshottable_features".into(), - } - } -} -#[doc = "Builder for the [Snapshot Get Features API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/modules-snapshots.html)\n\nReturns a list of features which can be snapshotted in this cluster."] -#[derive(Clone, Debug)] -pub struct SnapshotGetFeatures<'a, 'b> { - transport: &'a Transport, - parts: SnapshotGetFeaturesParts, - error_trace: Option, - filter_path: Option<&'b [&'b str]>, - headers: HeaderMap, - human: Option, - master_timeout: Option<&'b str>, - pretty: Option, - request_timeout: Option, - source: Option<&'b str>, -} -impl<'a, 'b> SnapshotGetFeatures<'a, 'b> { - #[doc = "Creates a new instance of [SnapshotGetFeatures]"] - pub fn new(transport: &'a Transport) -> Self { - let headers = HeaderMap::new(); - SnapshotGetFeatures { - transport, - parts: SnapshotGetFeaturesParts::None, - headers, - error_trace: None, - filter_path: None, - human: None, - master_timeout: None, - pretty: None, - request_timeout: None, - source: None, - } - } - #[doc = "Include the stack trace of returned errors."] - pub fn error_trace(mut self, error_trace: bool) -> Self { - self.error_trace = Some(error_trace); - self - } - #[doc = "A comma-separated list of filters used to reduce the response."] - pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self { - self.filter_path = Some(filter_path); - self - } - #[doc = "Adds a HTTP header"] - pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self { - self.headers.insert(key, value); - self - } - #[doc = "Return human readable values for statistics."] - pub fn human(mut self, human: bool) -> Self { - self.human = Some(human); - self - } - #[doc = "Explicit operation timeout for connection to master node"] - pub fn master_timeout(mut self, master_timeout: &'b str) -> Self { - self.master_timeout = Some(master_timeout); - self - } - #[doc = "Pretty format the returned JSON response."] - pub fn pretty(mut self, pretty: bool) -> Self { - self.pretty = Some(pretty); - self - } - #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."] - pub fn request_timeout(mut self, timeout: Duration) -> Self { - self.request_timeout = Some(timeout); - self - } - #[doc = "The URL-encoded request definition. 
Useful for libraries that do not accept a request body for non-POST requests."] - pub fn source(mut self, source: &'b str) -> Self { - self.source = Some(source); - self - } - #[doc = "Creates an asynchronous call to the Snapshot Get Features API that can be awaited"] - pub async fn send(self) -> Result { - let path = self.parts.url(); - let method = Method::Get; - let headers = self.headers; - let timeout = self.request_timeout; - let query_string = { - #[serde_with::skip_serializing_none] - #[derive(Serialize)] - struct QueryParams<'b> { - error_trace: Option, - #[serde(serialize_with = "crate::client::serialize_coll_qs")] - filter_path: Option<&'b [&'b str]>, - human: Option, - master_timeout: Option<&'b str>, - pretty: Option, - source: Option<&'b str>, - } - let query_params = QueryParams { - error_trace: self.error_trace, - filter_path: self.filter_path, - human: self.human, - master_timeout: self.master_timeout, - pretty: self.pretty, - source: self.source, - }; - Some(query_params) - }; - let body = Option::<()>::None; - let response = self - .transport - .send(method, &path, headers, query_string.as_ref(), body, timeout) - .await?; - Ok(response) - } -} -#[derive(Debug, Clone, PartialEq)] #[doc = "API parts for the Snapshot Get Repository API"] pub enum SnapshotGetRepositoryParts<'b> { #[doc = "No parts"] @@ -1946,10 +1825,6 @@ impl<'a> Snapshot<'a> { pub fn get<'b>(&'a self, parts: SnapshotGetParts<'b>) -> SnapshotGet<'a, 'b> { SnapshotGet::new(self.transport(), parts) } - #[doc = "[Snapshot Get Features API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/modules-snapshots.html)\n\nReturns a list of features which can be snapshotted in this cluster."] - pub fn get_features<'b>(&'a self) -> SnapshotGetFeatures<'a, 'b> { - SnapshotGetFeatures::new(self.transport()) - } #[doc = "[Snapshot Get Repository API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/modules-snapshots.html)\n\nReturns information about a repository."] pub fn get_repository<'b>( &'a self, diff --git a/elasticsearch/src/text_structure.rs b/elasticsearch/src/text_structure.rs deleted file mode 100644 index ee83d5eb..00000000 --- a/elasticsearch/src/text_structure.rs +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -// ----------------------------------------------- -// This file is generated, Please do not edit it manually. -// Run the following in the root of the repo to regenerate: -// -// cargo make generate-api -// ----------------------------------------------- - -//! Text structure APIs -//! -//! Determines the structure of text and other information that will be useful to import its contents to an Elasticsearch -//! index. 
-
-#![cfg(feature = "experimental-apis")]
-#![doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. Can have breaking changes in future\nversions or might even be removed entirely.\n "]
-#![allow(unused_imports)]
-use crate::{
-    client::Elasticsearch,
-    error::Error,
-    http::{
-        headers::{HeaderMap, HeaderName, HeaderValue, ACCEPT, CONTENT_TYPE},
-        request::{Body, JsonBody, NdBody, PARTS_ENCODED},
-        response::Response,
-        transport::Transport,
-        Method,
-    },
-    params::*,
-};
-use percent_encoding::percent_encode;
-use serde::Serialize;
-use std::{borrow::Cow, time::Duration};
-#[cfg(feature = "experimental-apis")]
-#[derive(Debug, Clone, PartialEq)]
-#[doc = "API parts for the Text Structure Find Structure API"]
-pub enum TextStructureFindStructureParts {
-    #[doc = "No parts"]
-    None,
-}
-#[cfg(feature = "experimental-apis")]
-impl TextStructureFindStructureParts {
-    #[doc = "Builds a relative URL path to the Text Structure Find Structure API"]
-    pub fn url(self) -> Cow<'static, str> {
-        match self {
-            TextStructureFindStructureParts::None => "/_text_structure/find_structure".into(),
-        }
-    }
-}
-#[doc = "Builder for the [Text Structure Find Structure API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/find-structure.html)\n\nFinds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch."]
-#[doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. Can have breaking changes in future\nversions or might even be removed entirely.\n "]
-#[cfg(feature = "experimental-apis")]
-#[derive(Clone, Debug)]
-pub struct TextStructureFindStructure<'a, 'b, B> {
-    transport: &'a Transport,
-    parts: TextStructureFindStructureParts,
-    body: Option<B>,
-    charset: Option<&'b str>,
-    column_names: Option<&'b [&'b str]>,
-    delimiter: Option<&'b str>,
-    error_trace: Option<bool>,
-    explain: Option<bool>,
-    filter_path: Option<&'b [&'b str]>,
-    format: Option<Format>,
-    grok_pattern: Option<&'b str>,
-    has_header_row: Option<bool>,
-    headers: HeaderMap,
-    human: Option<bool>,
-    line_merge_size_limit: Option<i32>,
-    lines_to_sample: Option<i32>,
-    pretty: Option<bool>,
-    quote: Option<&'b str>,
-    request_timeout: Option<Duration>,
-    should_trim_fields: Option<bool>,
-    source: Option<&'b str>,
-    timeout: Option<&'b str>,
-    timestamp_field: Option<&'b str>,
-    timestamp_format: Option<&'b str>,
-}
-#[cfg(feature = "experimental-apis")]
-impl<'a, 'b, B> TextStructureFindStructure<'a, 'b, B>
-where
-    B: Body,
-{
-    #[doc = "Creates a new instance of [TextStructureFindStructure]"]
-    pub fn new(transport: &'a Transport) -> Self {
-        let headers = HeaderMap::new();
-        TextStructureFindStructure {
-            transport,
-            parts: TextStructureFindStructureParts::None,
-            headers,
-            body: None,
-            charset: None,
-            column_names: None,
-            delimiter: None,
-            error_trace: None,
-            explain: None,
-            filter_path: None,
-            format: None,
-            grok_pattern: None,
-            has_header_row: None,
-            human: None,
-            line_merge_size_limit: None,
-            lines_to_sample: None,
-            pretty: None,
-            quote: None,
-            request_timeout: None,
-            should_trim_fields: None,
-            source: None,
-            timeout: None,
-            timestamp_field: None,
-            timestamp_format: None,
-        }
-    }
-    #[doc = "The body for the API call"]
-    pub fn body<T>(self, body: Vec<T>) -> TextStructureFindStructure<'a, 'b, NdBody<T>>
-    where
-        T: Body,
-    {
-        TextStructureFindStructure {
-            transport: self.transport,
-            parts: self.parts,
-            body: Some(NdBody(body)),
-            charset: self.charset,
-            column_names: self.column_names,
-            delimiter: self.delimiter,
-            error_trace: self.error_trace,
-            explain: self.explain,
-            filter_path: self.filter_path,
-            format: self.format,
-            grok_pattern: self.grok_pattern,
-            has_header_row: self.has_header_row,
-            headers: self.headers,
-            human: self.human,
-            line_merge_size_limit: self.line_merge_size_limit,
-            lines_to_sample: self.lines_to_sample,
-            pretty: self.pretty,
-            quote: self.quote,
-            request_timeout: self.request_timeout,
-            should_trim_fields: self.should_trim_fields,
-            source: self.source,
-            timeout: self.timeout,
-            timestamp_field: self.timestamp_field,
-            timestamp_format: self.timestamp_format,
-        }
-    }
-    #[doc = "Optional parameter to specify the character set of the file"]
-    pub fn charset(mut self, charset: &'b str) -> Self {
-        self.charset = Some(charset);
-        self
-    }
-    #[doc = "Optional parameter containing a comma separated list of the column names for a delimited file"]
-    pub fn column_names(mut self, column_names: &'b [&'b str]) -> Self {
-        self.column_names = Some(column_names);
-        self
-    }
-    #[doc = "Optional parameter to specify the delimiter character for a delimited file - must be a single character"]
-    pub fn delimiter(mut self, delimiter: &'b str) -> Self {
-        self.delimiter = Some(delimiter);
-        self
-    }
-    #[doc = "Include the stack trace of returned errors."]
-    pub fn error_trace(mut self, error_trace: bool) -> Self {
-        self.error_trace = Some(error_trace);
-        self
-    }
-    #[doc = "Whether to include a commentary on how the structure was derived"]
-    pub fn explain(mut self, explain: bool) -> Self {
-        self.explain = Some(explain);
-        self
-    }
-    #[doc = "A comma-separated list of filters used to reduce the response."]
-    pub fn filter_path(mut self, filter_path: &'b [&'b str]) -> Self {
-        self.filter_path = Some(filter_path);
-        self
-    }
-    #[doc = "Optional parameter to specify the high level file format"]
-    pub fn format(mut self, format: Format) -> Self {
-        self.format = Some(format);
-        self
-    }
-    #[doc = "Optional parameter to specify the Grok pattern that should be used to extract fields from messages in a semi-structured text file"]
-    pub fn grok_pattern(mut self, grok_pattern: &'b str) -> Self {
-        self.grok_pattern = Some(grok_pattern);
-        self
-    }
-    #[doc = "Optional parameter to specify whether a delimited file includes the column names in its first row"]
-    pub fn has_header_row(mut self, has_header_row: bool) -> Self {
-        self.has_header_row = Some(has_header_row);
-        self
-    }
-    #[doc = "Adds a HTTP header"]
-    pub fn header(mut self, key: HeaderName, value: HeaderValue) -> Self {
-        self.headers.insert(key, value);
-        self
-    }
-    #[doc = "Return human readable values for statistics."]
-    pub fn human(mut self, human: bool) -> Self {
-        self.human = Some(human);
-        self
-    }
-    #[doc = "Maximum number of characters permitted in a single message when lines are merged to create messages."]
-    pub fn line_merge_size_limit(mut self, line_merge_size_limit: i32) -> Self {
-        self.line_merge_size_limit = Some(line_merge_size_limit);
-        self
-    }
-    #[doc = "How many lines of the file should be included in the analysis"]
-    pub fn lines_to_sample(mut self, lines_to_sample: i32) -> Self {
-        self.lines_to_sample = Some(lines_to_sample);
-        self
-    }
-    #[doc = "Pretty format the returned JSON response."]
-    pub fn pretty(mut self, pretty: bool) -> Self {
-        self.pretty = Some(pretty);
-        self
-    }
-    #[doc = "Optional parameter to specify the quote character for a delimited file - must be a single character"]
-    pub fn quote(mut self, quote: &'b str) -> Self {
-        self.quote = Some(quote);
-        self
-    }
-    #[doc = "Sets a request timeout for this API call.\n\nThe timeout is applied from when the request starts connecting until the response body has finished."]
-    pub fn request_timeout(mut self, timeout: Duration) -> Self {
-        self.request_timeout = Some(timeout);
-        self
-    }
-    #[doc = "Optional parameter to specify whether the values between delimiters in a delimited file should have whitespace trimmed from them"]
-    pub fn should_trim_fields(mut self, should_trim_fields: bool) -> Self {
-        self.should_trim_fields = Some(should_trim_fields);
-        self
-    }
-    #[doc = "The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests."]
-    pub fn source(mut self, source: &'b str) -> Self {
-        self.source = Some(source);
-        self
-    }
-    #[doc = "Timeout after which the analysis will be aborted"]
-    pub fn timeout(mut self, timeout: &'b str) -> Self {
-        self.timeout = Some(timeout);
-        self
-    }
-    #[doc = "Optional parameter to specify the timestamp field in the file"]
-    pub fn timestamp_field(mut self, timestamp_field: &'b str) -> Self {
-        self.timestamp_field = Some(timestamp_field);
-        self
-    }
-    #[doc = "Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format"]
-    pub fn timestamp_format(mut self, timestamp_format: &'b str) -> Self {
-        self.timestamp_format = Some(timestamp_format);
-        self
-    }
-    #[doc = "Creates an asynchronous call to the Text Structure Find Structure API that can be awaited"]
-    pub async fn send(self) -> Result<Response, Error> {
-        let path = self.parts.url();
-        let method = Method::Post;
-        let headers = self.headers;
-        let timeout = self.request_timeout;
-        let query_string = {
-            #[serde_with::skip_serializing_none]
-            #[derive(Serialize)]
-            struct QueryParams<'b> {
-                charset: Option<&'b str>,
-                #[serde(serialize_with = "crate::client::serialize_coll_qs")]
-                column_names: Option<&'b [&'b str]>,
-                delimiter: Option<&'b str>,
-                error_trace: Option<bool>,
-                explain: Option<bool>,
-                #[serde(serialize_with = "crate::client::serialize_coll_qs")]
-                filter_path: Option<&'b [&'b str]>,
-                format: Option<Format>,
-                grok_pattern: Option<&'b str>,
-                has_header_row: Option<bool>,
-                human: Option<bool>,
-                line_merge_size_limit: Option<i32>,
-                lines_to_sample: Option<i32>,
-                pretty: Option<bool>,
-                quote: Option<&'b str>,
-                should_trim_fields: Option<bool>,
-                source: Option<&'b str>,
-                timeout: Option<&'b str>,
-                timestamp_field: Option<&'b str>,
-                timestamp_format: Option<&'b str>,
-            }
-            let query_params = QueryParams {
-                charset: self.charset,
-                column_names: self.column_names,
-                delimiter: self.delimiter,
-                error_trace: self.error_trace,
-                explain: self.explain,
-                filter_path: self.filter_path,
-                format: self.format,
-                grok_pattern: self.grok_pattern,
-                has_header_row: self.has_header_row,
-                human: self.human,
-                line_merge_size_limit: self.line_merge_size_limit,
-                lines_to_sample: self.lines_to_sample,
-                pretty: self.pretty,
-                quote: self.quote,
-                should_trim_fields: self.should_trim_fields,
-                source: self.source,
-                timeout: self.timeout,
-                timestamp_field: self.timestamp_field,
-                timestamp_format: self.timestamp_format,
-            };
-            Some(query_params)
-        };
-        let body = self.body;
-        let response = self
-            .transport
-            .send(method, &path, headers, query_string.as_ref(), body, timeout)
-            .await?;
-        Ok(response)
-    }
-}
-#[doc = "Namespace client for TextStructure APIs"]
-#[doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. Can have breaking changes in future\nversions or might even be removed entirely.\n "]
-#[cfg(feature = "experimental-apis")]
-pub struct TextStructure<'a> {
-    transport: &'a Transport,
-}
-#[cfg(feature = "experimental-apis")]
-impl<'a> TextStructure<'a> {
-    #[doc = "Creates a new instance of [TextStructure]"]
-    pub fn new(transport: &'a Transport) -> Self {
-        Self { transport }
-    }
-    pub fn transport(&self) -> &Transport {
-        self.transport
-    }
-    #[doc = "[Text Structure Find Structure API](https://www.elastic.co/guide/en/elasticsearch/reference/7.11/find-structure.html)\n\nFinds the structure of a text file. The text file must contain data that is suitable to be ingested into Elasticsearch."]
-    #[doc = " \n# Optional, experimental\nThis requires the `experimental-apis` feature. Can have breaking changes in future\nversions or might even be removed entirely.\n "]
-    #[cfg(feature = "experimental-apis")]
-    pub fn find_structure<'b>(&'a self) -> TextStructureFindStructure<'a, 'b, ()> {
-        TextStructureFindStructure::new(self.transport())
-    }
-}
-#[cfg(feature = "experimental-apis")]
-impl Elasticsearch {
-    #[doc = "Creates a namespace client for TextStructure APIs"]
-    pub fn text_structure(&self) -> TextStructure {
-        TextStructure::new(self.transport())
-    }
-}