diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java index 9511a6bc01e08..70e9fe424e77b 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/index/mapper/MapperServiceFactory.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.LowercaseNormalizer; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ProvidedIdFieldMapper; @@ -71,7 +72,8 @@ public static MapperService create(String mappings) { public T compile(Script script, ScriptContext scriptContext) { throw new UnsupportedOperationException(); } - } + }, + MapperMetrics.NOOP ); try { diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java index b6cbc3e7cce02..14f6fe6501a73 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/search/QueryParserHelperBenchmark.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; @@ -154,7 +155,8 @@ protected SearchExecutionContext buildSearchExecutionContext() { null, () -> true, null, - Collections.emptyMap() + Collections.emptyMap(), + MapperMetrics.NOOP ); } @@ -186,7 +188,8 @@ protected final MapperService createMapperService(String mappings) { public T compile(Script script, ScriptContext scriptContext) { throw new UnsupportedOperationException(); } - } + }, + MapperMetrics.NOOP ); try { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java index 24df3c4dab464..58b967d0a7722 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditTask.java @@ -60,6 +60,7 @@ import static org.gradle.api.JavaVersion.VERSION_20; import static org.gradle.api.JavaVersion.VERSION_21; import static org.gradle.api.JavaVersion.VERSION_22; +import static org.gradle.api.JavaVersion.VERSION_23; @CacheableTask public abstract class ThirdPartyAuditTask extends DefaultTask { @@ -336,8 +337,8 @@ private String runForbiddenAPIsCli() throws IOException { spec.setExecutable(javaHome.get() + "/bin/java"); } spec.classpath(getForbiddenAPIsClasspath(), classpath); - // Enable explicitly for each release as appropriate. Just JDK 20/21/22 for now, and just the vector module. - if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22)) { + // Enable explicitly for each release as appropriate. 
Just JDK 20/21/22/23 for now, and just the vector module. + if (isJavaVersion(VERSION_20) || isJavaVersion(VERSION_21) || isJavaVersion(VERSION_22) || isJavaVersion(VERSION_23)) { spec.jvmArgs("--add-modules", "jdk.incubator.vector"); } spec.jvmArgs("-Xmx1g"); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 31e1cb882305a..999f27a646b1f 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -1107,11 +1107,11 @@ private void logFileContents(String description, Path from, boolean tailLogs) { return; } - boolean foundNettyLeaks = false; + boolean foundLeaks = false; for (String logLine : errorsAndWarnings.keySet()) { - if (logLine.contains("ResourceLeakDetector]")) { + if (logLine.contains("ResourceLeakDetector") || logLine.contains("LeakTracker")) { tailLogs = true; - foundNettyLeaks = true; + foundLeaks = true; break; } } @@ -1140,8 +1140,8 @@ private void logFileContents(String description, Path from, boolean tailLogs) { }); } } - if (foundNettyLeaks) { - throw new TestClustersException("Found Netty ByteBuf leaks in node logs."); + if (foundLeaks) { + throw new TestClustersException("Found resource leaks in node logs."); } } diff --git a/client/test/build.gradle b/client/test/build.gradle index d9a10a9c6ffdc..8d457948b91b4 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -27,9 +27,9 @@ dependencies { api "org.hamcrest:hamcrest:${versions.hamcrest}" // mockito - api 'org.mockito:mockito-core:5.9.0' - api 'org.mockito:mockito-subclass:5.9.0' - api 'net.bytebuddy:byte-buddy:1.14.11' + api 'org.mockito:mockito-core:5.11.0' + api 'org.mockito:mockito-subclass:5.11.0' + api 'net.bytebuddy:byte-buddy:1.14.12' api 'org.objenesis:objenesis:3.3' } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 04079284b3ec9..f853304bcdf90 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -27,61 +27,64 @@ static List systemJvmOptions(Settings nodeSettings, final Map maybeWorkaroundG1Bug() { + Runtime.Version v = Runtime.version(); + if (v.feature() == 22 && v.update() <= 1) { + return Stream.of("-XX:+UnlockDiagnosticVMOptions", "-XX:G1NumCollectionsKeepPinned=10000000"); + } + return Stream.of(); + } + private static String findLibraryPath(Map sysprops) { // working dir is ES installation, so we use relative path here Path platformDir = Paths.get("lib", "platform"); diff --git a/docs/changelog/106486.yaml b/docs/changelog/106486.yaml new file mode 100644 index 0000000000000..b33df50780e02 --- /dev/null +++ b/docs/changelog/106486.yaml @@ -0,0 +1,17 @@ +pr: 106486 +summary: Create custom parser for ISO-8601 datetimes +area: Infra/Core +type: enhancement +issues: + - 102063 +highlight: + title: New custom parser for ISO-8601 datetimes + body: |- + This introduces a new custom parser for ISO-8601 datetimes, for the `iso8601`, `strict_date_optional_time`, and + `strict_date_optional_time_nanos` built-in date formats. 
This provides a performance improvement over the + default Java date-time parsing. Whilst it maintains much of the same behaviour, + the new parser does not accept nonsensical date-time strings that have multiple fractional seconds fields + or multiple timezone specifiers. If the new parser fails to parse a string, it will then use the previous parser + to parse it. If a large proportion of the input data consists of these invalid strings, this may cause + a small performance degradation. If you wish to force the use of the old parsers regardless, + set the JVM property `es.datetime.java_time_parsers=true` on all ES nodes. diff --git a/docs/changelog/108571.yaml b/docs/changelog/108571.yaml new file mode 100644 index 0000000000000..b863ac90d9e5f --- /dev/null +++ b/docs/changelog/108571.yaml @@ -0,0 +1,5 @@ +pr: 108571 +summary: Workaround G1 bug for JDK 22 and 22.0.1 +area: Infra/CLI +type: bug +issues: [] diff --git a/docs/changelog/108639.yaml b/docs/changelog/108639.yaml new file mode 100644 index 0000000000000..586270c3c761c --- /dev/null +++ b/docs/changelog/108639.yaml @@ -0,0 +1,28 @@ +pr: 108639 +summary: Add support for the 'Domain' database to the geoip processor +area: Ingest Node +type: enhancement +issues: [] +highlight: + title: Add support for the 'Domain' database to the geoip processor + body: |- + Follow on to #107287 and #107377 + + Adds support for the ['GeoIP2 + Domain'](https://dev.maxmind.com/geoip/docs/databases/domain) database + from MaxMind to the `geoip` processor. + + The `geoip` processor will automatically download the [various + 'GeoLite2' + databases](https://dev.maxmind.com/geoip/geolite2-free-geolocation-data), + but the 'GeoIP2 Domain' database is not a 'GeoLite2' database -- it's a + commercial database available to those with a suitable license from + MaxMind. + + The support that is being added for it in this PR is in line with the + support that we already have for MaxMind's 'GeoIP2 City' and 'GeoIP2 + Country' databases -- that is, one would need to arrange their own + download management via some custom endpoint or otherwise arrange for + the relevant file(s) to be in the `$ES_CONFIG/ingest-geoip` directory on + the nodes of the cluster. + notable: true diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index 1f73cd08401ee..c256b30060bf6 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -268,7 +268,7 @@ used for abuse detection. ===== `return_documents`:: (Optional, boolean) -For `cohere` service only. Specify whether to return doc text within the +For `cohere` service only. Specify whether to return doc text within the results. `top_n`:: @@ -307,16 +307,6 @@ For `openai` and `azureopenai` service only. Specifies the user issuing the request, which can be used for abuse detection. ===== -+ -.`task_settings` for the `completion` task type -[%collapsible%closed] -===== -`user`::: -(optional, string) -For `openai` service only. Specifies the user issuing the request, which can be used for abuse detection. 
-===== - - [discrete] [[put-inference-api-example]] ==== {api-examples-title} @@ -351,11 +341,11 @@ The following example shows how to create an {infer} endpoint called [source,console] ------------------------------------------------------------ -PUT _inference/rerank/cohere-rerank +PUT _inference/rerank/cohere-rerank { "service": "cohere", "service_settings": { - "api_key": "", + "api_key": "", "model_id": "rerank-english-v3.0" }, "task_settings": { @@ -366,7 +356,7 @@ PUT _inference/rerank/cohere-rerank ------------------------------------------------------------ // TEST[skip:TBD] -For more examples, also review the +For more examples, also review the https://docs.cohere.com/docs/elasticsearch-and-cohere#rerank-search-results-with-cohere-and-elasticsearch[Cohere documentation]. diff --git a/docs/reference/ingest/processors/geoip.asciidoc b/docs/reference/ingest/processors/geoip.asciidoc index 12e7a5f10135c..3348ae9cbfee9 100644 --- a/docs/reference/ingest/processors/geoip.asciidoc +++ b/docs/reference/ingest/processors/geoip.asciidoc @@ -59,6 +59,7 @@ in `properties`. * If the GeoIP2 Anonymous IP database is used, then the following fields may be added under the `target_field`: `ip`, `hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, and `residential_proxy`. The fields actually added depend on what has been found and which properties were configured in `properties`. +* If the GeoIP2 Domain database is used, then the following fields may be added under the `target_field`: `ip`, and `domain`. * If the GeoIP2 Enterprise database is used, then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name`, `continent_name`, `region_iso_code`, `region_name`, `city_name`, `timezone`, `location`, `asn`, `organization_name`, `network`, `hosting_provider`, `tor_exit_node`, `anonymous_vpn`, `anonymous`, `public_proxy`, and `residential_proxy`. 
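(Illustration, not part of the diff.) The `geoip` processor selects a database handler by suffix-matching MaxMind's database type string; the Database.java hunk later in this diff adds the `-Domain` case to that dispatch. A minimal sketch with simplified, hypothetical names — the real `getDatabase` also takes the database file name, and its exact fallback behavior is not shown here:

    // Hypothetical sketch of the suffix dispatch in Database.getDatabase.
    static Database databaseForType(String databaseType) {
        if (databaseType.endsWith("-Country")) return Database.Country;
        if (databaseType.endsWith("-ASN")) return Database.Asn;
        if (databaseType.endsWith("-Anonymous-IP")) return Database.AnonymousIp;
        if (databaseType.endsWith("-Domain")) return Database.Domain; // new in this PR
        if (databaseType.endsWith("-Enterprise")) return Database.Enterprise;
        return null; // unrecognized type; the caller decides how to handle this
    }
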
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 000a2c15f5b12..00c6ec4bd4a2e 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1589,9 +1589,9 @@ - - - + + + @@ -3701,13 +3701,13 @@ - - - + + + - - + + diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java index 889b4c490d23f..5a9b00dde58cc 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/Database.java @@ -75,6 +75,7 @@ enum Database { Property.RESIDENTIAL_PROXY ) ), + Domain(Set.of(Property.IP, Property.DOMAIN), Set.of(Property.DOMAIN)), Enterprise( Set.of( Property.IP, @@ -94,7 +95,8 @@ enum Database { Property.ANONYMOUS_VPN, Property.ANONYMOUS, Property.PUBLIC_PROXY, - Property.RESIDENTIAL_PROXY + Property.RESIDENTIAL_PROXY, + Property.DOMAIN ), Set.of( Property.COUNTRY_ISO_CODE, @@ -111,6 +113,7 @@ enum Database { private static final String COUNTRY_DB_SUFFIX = "-Country"; private static final String ASN_DB_SUFFIX = "-ASN"; private static final String ANONYMOUS_IP_DB_SUFFIX = "-Anonymous-IP"; + private static final String DOMAIN_DB_SUFFIX = "-Domain"; private static final String ENTERPRISE_DB_SUFFIX = "-Enterprise"; /** @@ -133,6 +136,8 @@ public static Database getDatabase(final String databaseType, final String datab database = Database.Asn; } else if (databaseType.endsWith(Database.ANONYMOUS_IP_DB_SUFFIX)) { database = Database.AnonymousIp; + } else if (databaseType.endsWith(Database.DOMAIN_DB_SUFFIX)) { + database = Database.Domain; } else if (databaseType.endsWith(Database.ENTERPRISE_DB_SUFFIX)) { database = Database.Enterprise; } @@ -209,7 +214,8 @@ enum Property { ANONYMOUS_VPN, ANONYMOUS, PUBLIC_PROXY, - RESIDENTIAL_PROXY; + RESIDENTIAL_PROXY, + DOMAIN; /** * Parses a string representation of a property into an actual Property instance. 
Not all properties that exist are diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index 12f6a299e1232..97b90f612ea92 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -16,6 +16,7 @@ import com.maxmind.geoip2.model.AsnResponse; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.model.DomainResponse; import com.maxmind.geoip2.model.EnterpriseResponse; import org.apache.logging.log4j.LogManager; @@ -177,6 +178,12 @@ public AnonymousIpResponse getAnonymousIp(InetAddress ipAddress) { return getResponse(ipAddress, DatabaseReader::tryAnonymousIp); } + @Nullable + @Override + public DomainResponse getDomain(InetAddress ipAddress) { + return getResponse(ipAddress, DatabaseReader::tryDomain); + } + @Nullable @Override public EnterpriseResponse getEnterprise(InetAddress ipAddress) { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDatabase.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDatabase.java index 088fa2b0d1fa8..7cbd423a5f2e9 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDatabase.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDatabase.java @@ -12,6 +12,7 @@ import com.maxmind.geoip2.model.AsnResponse; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.model.DomainResponse; import com.maxmind.geoip2.model.EnterpriseResponse; import org.elasticsearch.core.Nullable; @@ -58,6 +59,9 @@ public interface GeoIpDatabase { @Nullable AnonymousIpResponse getAnonymousIp(InetAddress ipAddress); + @Nullable + DomainResponse getDomain(InetAddress ipAddress); + @Nullable EnterpriseResponse getEnterprise(InetAddress ipAddress); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 6898e44335793..16485987176b7 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -13,6 +13,7 @@ import com.maxmind.geoip2.model.AsnResponse; import com.maxmind.geoip2.model.CityResponse; import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.model.DomainResponse; import com.maxmind.geoip2.model.EnterpriseResponse; import com.maxmind.geoip2.record.City; import com.maxmind.geoip2.record.Continent; @@ -175,6 +176,7 @@ private Map getGeoData(GeoIpDatabase geoIpDatabase, String ip) t case Country -> retrieveCountryGeoData(geoIpDatabase, ipAddress); case Asn -> retrieveAsnGeoData(geoIpDatabase, ipAddress); case AnonymousIp -> retrieveAnonymousIpGeoData(geoIpDatabase, ipAddress); + case Domain -> retrieveDomainGeoData(geoIpDatabase, ipAddress); case Enterprise -> retrieveEnterpriseGeoData(geoIpDatabase, ipAddress); }; } @@ -384,6 +386,28 @@ private Map retrieveAnonymousIpGeoData(GeoIpDatabase geoIpDataba return geoData; } + private Map retrieveDomainGeoData(GeoIpDatabase geoIpDatabase, InetAddress ipAddress) { + DomainResponse response = 
geoIpDatabase.getDomain(ipAddress); + if (response == null) { + return Map.of(); + } + + String domain = response.getDomain(); + + Map geoData = new HashMap<>(); + for (Property property : this.properties) { + switch (property) { + case IP -> geoData.put("ip", NetworkAddress.format(ipAddress)); + case DOMAIN -> { + if (domain != null) { + geoData.put("domain", domain); + } + } + } + } + return geoData; + } + private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabase, InetAddress ipAddress) { EnterpriseResponse response = geoIpDatabase.getEnterprise(ipAddress); if (response == null) { @@ -407,6 +431,8 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas boolean isPublicProxy = response.getTraits().isPublicProxy(); boolean isResidentialProxy = response.getTraits().isResidentialProxy(); + String domain = response.getTraits().getDomain(); + Map geoData = new HashMap<>(); for (Property property : this.properties) { switch (property) { @@ -500,6 +526,11 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas case RESIDENTIAL_PROXY -> { geoData.put("residential_proxy", isResidentialProxy); } + case DOMAIN -> { + if (domain != null) { + geoData.put("domain", domain); + } + } } } return geoData; diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index ec77cacbdb6b6..cd6737cced308 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -336,8 +336,36 @@ public void testAnonymmousIp() throws Exception { assertThat(geoData.get("residential_proxy"), equalTo(true)); } + public void testDomain() throws Exception { + String ip = "69.219.64.2"; + GeoIpProcessor processor = new GeoIpProcessor( + randomAlphaOfLength(10), + null, + "source_field", + loader("/GeoIP2-Domain-Test.mmdb"), + () -> true, + "target_field", + ALL_PROPERTIES, + false, + false, + "filename" + ); + + Map document = new HashMap<>(); + document.put("source_field", ip); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + processor.execute(ingestDocument); + + assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); + @SuppressWarnings("unchecked") + Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); + assertThat(geoData.size(), equalTo(2)); + assertThat(geoData.get("ip"), equalTo(ip)); + assertThat(geoData.get("domain"), equalTo("ameritech.net")); + } + public void testEnterprise() throws Exception { - String ip = "2.125.160.216"; + String ip = "74.209.24.4"; GeoIpProcessor processor = new GeoIpProcessor( randomAlphaOfLength(10), null, @@ -359,26 +387,29 @@ public void testEnterprise() throws Exception { assertThat(ingestDocument.getSourceAndMetadata().get("source_field"), equalTo(ip)); @SuppressWarnings("unchecked") Map geoData = (Map) ingestDocument.getSourceAndMetadata().get("target_field"); - assertThat(geoData.size(), equalTo(16)); + assertThat(geoData.size(), equalTo(19)); assertThat(geoData.get("ip"), equalTo(ip)); - assertThat(geoData.get("country_iso_code"), equalTo("GB")); - assertThat(geoData.get("country_name"), equalTo("United Kingdom")); - assertThat(geoData.get("continent_name"), equalTo("Europe")); - assertThat(geoData.get("region_iso_code"), equalTo("GB-WBK")); - assertThat(geoData.get("region_name"), 
equalTo("West Berkshire")); - assertThat(geoData.get("city_name"), equalTo("Boxford")); - assertThat(geoData.get("timezone"), equalTo("Europe/London")); + assertThat(geoData.get("country_iso_code"), equalTo("US")); + assertThat(geoData.get("country_name"), equalTo("United States")); + assertThat(geoData.get("continent_name"), equalTo("North America")); + assertThat(geoData.get("region_iso_code"), equalTo("US-NY")); + assertThat(geoData.get("region_name"), equalTo("New York")); + assertThat(geoData.get("city_name"), equalTo("Chatham")); + assertThat(geoData.get("timezone"), equalTo("America/New_York")); Map location = new HashMap<>(); - location.put("lat", 51.75); - location.put("lon", -1.25); + location.put("lat", 42.3478); + location.put("lon", -73.5549); assertThat(geoData.get("location"), equalTo(location)); - assertThat(geoData.get("network"), equalTo("2.125.160.216/29")); + assertThat(geoData.get("asn"), equalTo(14671L)); + assertThat(geoData.get("organization_name"), equalTo("FairPoint Communications")); + assertThat(geoData.get("network"), equalTo("74.209.16.0/20")); assertThat(geoData.get("hosting_provider"), equalTo(false)); assertThat(geoData.get("tor_exit_node"), equalTo(false)); assertThat(geoData.get("anonymous_vpn"), equalTo(false)); assertThat(geoData.get("anonymous"), equalTo(false)); assertThat(geoData.get("public_proxy"), equalTo(false)); assertThat(geoData.get("residential_proxy"), equalTo(false)); + assertThat(geoData.get("domain"), equalTo("frpt.net")); } public void testAddressIsNotInTheDatabase() throws Exception { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java index 4e6e1d11c0fdd..07ea7f59eb521 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/MaxMindSupportTests.java @@ -201,6 +201,9 @@ public class MaxMindSupportTests extends ESTestCase { "traits.userType" ); + private static final Set DOMAIN_SUPPORTED_FIELDS = Set.of("domain"); + private static final Set DOMAIN_UNSUPPORTED_FIELDS = Set.of("ipAddress", "network"); + private static final Set ENTERPRISE_SUPPORTED_FIELDS = Set.of( "city.name", "continent.name", @@ -215,6 +218,7 @@ public class MaxMindSupportTests extends ESTestCase { "traits.anonymousVpn", "traits.autonomousSystemNumber", "traits.autonomousSystemOrganization", + "traits.domain", "traits.hostingProvider", "traits.network", "traits.publicProxy", @@ -268,7 +272,6 @@ public class MaxMindSupportTests extends ESTestCase { "traits.anonymousProxy", "traits.anycast", "traits.connectionType", - "traits.domain", "traits.ipAddress", "traits.isp", "traits.legitimateProxy", @@ -290,6 +293,8 @@ public class MaxMindSupportTests extends ESTestCase { CITY_SUPPORTED_FIELDS, Database.Country, COUNTRY_SUPPORTED_FIELDS, + Database.Domain, + DOMAIN_SUPPORTED_FIELDS, Database.Enterprise, ENTERPRISE_SUPPORTED_FIELDS ); @@ -302,6 +307,8 @@ public class MaxMindSupportTests extends ESTestCase { CITY_UNSUPPORTED_FIELDS, Database.Country, COUNTRY_UNSUPPORTED_FIELDS, + Database.Domain, + DOMAIN_UNSUPPORTED_FIELDS, Database.Enterprise, ENTERPRISE_UNSUPPORTED_FIELDS ); @@ -314,13 +321,14 @@ public class MaxMindSupportTests extends ESTestCase { CityResponse.class, Database.Country, CountryResponse.class, + Database.Domain, + DomainResponse.class, Database.Enterprise, EnterpriseResponse.class ); private static 
final Set> KNOWN_UNSUPPORTED_RESPONSE_CLASSES = Set.of( ConnectionTypeResponse.class, - DomainResponse.class, IspResponse.class, IpRiskResponse.class ); diff --git a/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb b/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb new file mode 100644 index 0000000000000..d21c2a93df7d4 Binary files /dev/null and b/modules/ingest-geoip/src/test/resources/GeoIP2-Domain-Test.mmdb differ diff --git a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java index 317bfa9edd1c9..275666eec5c42 100644 --- a/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java +++ b/modules/kibana/src/internalClusterTest/java/org/elasticsearch/kibana/KibanaThreadPoolIT.java @@ -9,30 +9,66 @@ package org.elasticsearch.kibana; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.indices.SystemIndexThreadPoolTestCase; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.concurrent.Phaser; import java.util.stream.Stream; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.startsWith; -public class KibanaThreadPoolIT extends SystemIndexThreadPoolTestCase { +/** + * Tests to verify that system indices are bypassing user-space thread pools + * + *
+ * <p>We can block thread pools by setting them to one thread and 1 element queue, then submitting
+ * threads that wait on a phaser. This lets us verify that operations on system indices
+ * are being directed to other thread pools.</p>
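+ * <p>A minimal sketch of the blocking pattern (simplified from {@code blockThreadPool}
+ * below; retry handling omitted):</p>
+ * <pre>{@code
+ * Phaser phaser = new Phaser();
+ * phaser.register();                  // the test thread itself
+ * Runnable waitAction = () -> {
+ *     phaser.arriveAndAwaitAdvance(); // signal that this pool thread is pinned
+ *     phaser.arriveAndAwaitAdvance(); // wait for the test to release it
+ * };
+ * ThreadPool.Info info = threadPool.info(threadPoolName);
+ * phaser.bulkRegister(info.getMax());
+ * for (int i = 0; i < info.getMax(); i++) {
+ *     threadPool.executor(threadPoolName).execute(waitAction);
+ * }
+ * phaser.arriveAndAwaitAdvance();     // returns once every pool thread is pinned
+ * }</pre>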
+ */ +public class KibanaThreadPoolIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(IndexingPressure.MAX_INDEXING_BYTES.getKey(), "1KB") + .put("thread_pool.search.size", 1) + .put("thread_pool.search.queue_size", 1) + .put("thread_pool.write.size", 1) + .put("thread_pool.write.queue_size", 1) + .put("thread_pool.get.size", 1) + .put("thread_pool.get.queue_size", 1) + .build(); + } + + private static final String USER_INDEX = "user_index"; + // For system indices that use ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS, we'll want to + // block normal system index thread pools as well. + private static final Set THREAD_POOLS_TO_BLOCK = Set.of(ThreadPool.Names.GET, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH); @Override protected Collection> nodePlugins() { return Set.of(KibanaPlugin.class); } - public void testKibanaThreadPool() { + public void testKibanaThreadPoolByPassesBlockedThreadPools() throws Exception { List kibanaSystemIndices = Stream.of( KibanaPlugin.KIBANA_INDEX_DESCRIPTOR.getIndexPattern(), KibanaPlugin.REPORTING_INDEX_DESCRIPTOR.getIndexPattern(), @@ -61,4 +97,108 @@ public void testKibanaThreadPool() { } }); } + + public void testBlockedThreadPoolsRejectUserRequests() throws Exception { + assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); + + runWithBlockedThreadPools(this::assertThreadPoolsBlocked); + + assertAcked(client().admin().indices().prepareDelete(USER_INDEX)); + } + + private void assertThreadPoolsBlocked() { + + var e1 = expectThrows( + EsRejectedExecutionException.class, + () -> client().prepareIndex(USER_INDEX).setSource(Map.of("foo", "bar")).get() + ); + assertThat(e1.getMessage(), startsWith("rejected execution of TimedRunnable")); + var e2 = expectThrows(EsRejectedExecutionException.class, () -> client().prepareGet(USER_INDEX, "id").get()); + assertThat(e2.getMessage(), startsWith("rejected execution of ActionRunnable")); + var e3 = expectThrows( + SearchPhaseExecutionException.class, + () -> client().prepareSearch(USER_INDEX) + .setQuery(QueryBuilders.matchAllQuery()) + // Request times out if max concurrent shard requests is set to 1 + .setMaxConcurrentShardRequests(usually() ? 
SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10)) + .get() + ); + assertThat(e3.getMessage(), containsString("all shards failed")); + } + + protected void runWithBlockedThreadPools(Runnable runnable) throws Exception { + Phaser phaser = new Phaser(); + + // register this test's thread + phaser.register(); + + blockThreadPool(phaser); + phaser.arriveAndAwaitAdvance();// wait until all waitAction are executing + + fillQueues(); + + logger.debug("number of nodes " + internalCluster().getNodeNames().length); + logger.debug("number of parties arrived " + phaser.getArrivedParties()); + try { + runnable.run(); + } finally { + phaser.arriveAndAwaitAdvance(); // release all waitAction + } + } + + private void blockThreadPool(Phaser phaser) { + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : THREAD_POOLS_TO_BLOCK) { + blockThreadPool(threadPoolName, threadPool, phaser); + } + } + } + + private void fillQueues() { + for (String nodeName : internalCluster().getNodeNames()) { + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + for (String threadPoolName : THREAD_POOLS_TO_BLOCK) { + fillThreadPoolQueues(threadPoolName, threadPool); + } + } + } + + private static void blockThreadPool(String threadPoolName, ThreadPool threadPool, Phaser phaser) { + ThreadPool.Info info = threadPool.info(threadPoolName); + + Runnable waitAction = () -> { + phaser.arriveAndAwaitAdvance();// block until all are executed on a threadpool + phaser.arriveAndAwaitAdvance();// block until main thread has not finished + }; + + phaser.bulkRegister(info.getMax()); + + for (int i = 0; i < info.getMax(); i++) { + // we need to make sure that there is a task blocking a thread pool + // otherwise a queue might end up having a spot + do { + try { + threadPool.executor(threadPoolName).execute(waitAction); + break; + } catch (EsRejectedExecutionException e) { + // if exception was thrown when submitting, retry. 
+ } + } while (true); + } + } + + private static void fillThreadPoolQueues(String threadPoolName, ThreadPool threadPool) { + ThreadPool.Info info = threadPool.info(threadPoolName); + + for (int i = 0; i < info.getQueueSize().singles(); i++) { + try { + threadPool.executor(threadPoolName).execute(() -> {}); + } catch (EsRejectedExecutionException e) { + // we can't be sure that some other task won't get queued in a test cluster + // but the threadpool's thread is already blocked + } + } + } + } diff --git a/muted-tests.yml b/muted-tests.yml index 210215a131339..e1e80a3d3459b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,6 +1,9 @@ tests: - class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" issue: "https://github.com/elastic/elasticsearch/issues/108529" +- class: "org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilterTests" + issue: "https://github.com/elastic/elasticsearch/issues/108649" + method: "testManyRandomDocs" # Examples: # # Mute a single test case in a YAML test suite: diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java index eaf439f264ad5..d04c8802635d3 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java @@ -72,7 +72,7 @@ public void testDanglingIndicesCanBeListed() throws Exception { internalCluster().startNodes(3, buildSettings(0)); final DanglingIndexDetails danglingIndexDetails = createDanglingIndices(INDEX_NAME); - final String stoppedNodeId = mapNodeNameToId(danglingIndexDetails.stoppedNodeName); + final String stoppedNodeId = getNodeId(danglingIndexDetails.stoppedNodeName); final RestClient restClient = getRestClient(); @@ -163,7 +163,12 @@ public void testDanglingIndicesCanBeDeleted() throws Exception { // tombstone has been pushed out of the graveyard. createIndex("additional"); deleteIndex("additional"); - assertThat(listDanglingIndexIds(), is(empty())); + // reading dangling index metadata happens without all the shard locks + // (as we do not know the index name from the index directory structure). + // As a result the index directory could be updated or deleted in the meantime by any concurrent operation + // and result in a node request failure that is going to be propagated to the API call. + // Since the dangling index API is best effort, we expect such failures to be retried on the client level. + assertBusy(() -> assertThat(listDanglingIndexIds(), is(empty()))); } private List listDanglingIndexIds() throws IOException { @@ -171,15 +176,14 @@ private List listDanglingIndexIds() throws IOException { assertOK(response); final XContentTestUtils.JsonMapView mapView = createJsonMapView(response.getEntity().getContent()); + logger.warn("dangling API response: {}", mapView); assertThat(mapView.get("_nodes.total"), equalTo(3)); assertThat(mapView.get("_nodes.successful"), equalTo(3)); assertThat(mapView.get("_nodes.failed"), equalTo(0)); List indices = mapView.get("dangling_indices"); - List danglingIndexIds = new ArrayList<>(); - for (int i = 0; i < indices.size(); i++) { danglingIndexIds.add(mapView.get("dangling_indices." + i + ".index_uuid")); } @@ -187,23 +191,6 @@ private List listDanglingIndexIds() throws IOException { return danglingIndexIds; } - /** - * Given a node name, finds the corresponding node ID. 
- */ - private String mapNodeNameToId(String nodeName) throws IOException { - final Response catResponse = getRestClient().performRequest(new Request("GET", "/_cat/nodes?full_id&h=id,name")); - assertOK(catResponse); - - for (String nodeLine : Streams.readAllLines(catResponse.getEntity().getContent())) { - String[] elements = nodeLine.split(" "); - if (elements[1].equals(nodeName)) { - return elements[0]; - } - } - - throw new AssertionError("Failed to map node name [" + nodeName + "] to node ID"); - } - /** * Helper that creates one or more indices, and importantly, * checks that they are green before proceeding. This is important diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index 3f7ed48b714fb..b9850bc95275c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.NoOpEngine; import org.elasticsearch.index.flush.FlushStats; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -633,7 +634,8 @@ public static final IndexShard newIndexShard( cbs, IndexModule.DEFAULT_SNAPSHOT_COMMIT_SUPPLIER, System::nanoTime, - null + null, + MapperMetrics.NOOP ); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 2bbe3d36f031f..b6389d0b112b6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -52,11 +52,6 @@ protected AcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) this.ackTimeout = Objects.requireNonNull(ackTimeout); } - @Deprecated(forRemoval = true) // just a temporary compatibility shim - protected AcknowledgedRequest(TimeValue ackTimeout) { - this(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ackTimeout); - } - protected AcknowledgedRequest(StreamInput in) throws IOException { super(in); this.ackTimeout = in.readTimeValue(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java index 641fc0e76311f..0124f23a1156d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifier.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; @@ -58,6 +59,7 @@ public class IndexMetadataVerifier { private final MapperRegistry mapperRegistry; private final IndexScopedSettings indexScopedSettings; private final ScriptCompiler scriptService; + private final MapperMetrics mapperMetrics; public 
IndexMetadataVerifier( Settings settings, @@ -65,7 +67,8 @@ public IndexMetadataVerifier( NamedXContentRegistry xContentRegistry, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings, - ScriptCompiler scriptCompiler + ScriptCompiler scriptCompiler, + MapperMetrics mapperMetrics ) { this.settings = settings; this.clusterService = clusterService; @@ -74,6 +77,7 @@ public IndexMetadataVerifier( this.mapperRegistry = mapperRegistry; this.indexScopedSettings = indexScopedSettings; this.scriptService = scriptCompiler; + this.mapperMetrics = mapperMetrics; } /** @@ -182,7 +186,8 @@ protected TokenStreamComponents createComponents(String fieldName) { mapperRegistry, () -> null, indexSettings.getMode().idFieldMapperWithoutFieldData(), - scriptService + scriptService, + mapperMetrics ) ) { mapperService.merge(indexMetadata, MapperService.MergeReason.MAPPING_RECOVERY); diff --git a/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java new file mode 100644 index 0000000000000..39dbb83bdf5a4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import java.util.stream.IntStream; + +/** + * A CharSequence that provides a subsequence of another CharSequence without allocating a new backing array (as String does) + */ +class CharSubSequence implements CharSequence { + private final CharSequence wrapped; + private final int startOffset; // inclusive + private final int endOffset; // exclusive + + CharSubSequence(CharSequence wrapped, int startOffset, int endOffset) { + if (startOffset < 0) throw new IllegalArgumentException(); + if (endOffset > wrapped.length()) throw new IllegalArgumentException(); + if (endOffset < startOffset) throw new IllegalArgumentException(); + + this.wrapped = wrapped; + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + @Override + public int length() { + return endOffset - startOffset; + } + + @Override + public char charAt(int index) { + int adjustedIndex = index + startOffset; + if (adjustedIndex < startOffset || adjustedIndex >= endOffset) throw new IndexOutOfBoundsException(index); + return wrapped.charAt(adjustedIndex); + } + + @Override + public boolean isEmpty() { + return startOffset == endOffset; + } + + @Override + public CharSequence subSequence(int start, int end) { + int adjustedStart = start + startOffset; + int adjustedEnd = end + startOffset; + if (adjustedStart < startOffset) throw new IndexOutOfBoundsException(start); + if (adjustedEnd > endOffset) throw new IndexOutOfBoundsException(end); + if (adjustedStart > adjustedEnd) throw new IndexOutOfBoundsException(); + + return wrapped.subSequence(adjustedStart, adjustedEnd); + } + + @Override + public IntStream chars() { + return wrapped.chars().skip(startOffset).limit(endOffset - startOffset); + } + + @Override + public String toString() { + return wrapped.subSequence(startOffset, endOffset).toString(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java 
b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 7dae11fb8d720..1133eac3f8f7b 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -9,7 +9,10 @@ package org.elasticsearch.common.time; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.internal.spi.LoggerFactory; import java.time.Instant; import java.time.LocalDate; @@ -30,6 +33,7 @@ import java.time.temporal.TemporalQuery; import java.time.temporal.WeekFields; import java.util.Locale; +import java.util.Set; import java.util.stream.Stream; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -43,6 +47,24 @@ public class DateFormatters { + /** + * The ISO8601 parser is as close as possible to the java.time based parsers, but there are some strings + * that are no longer accepted (multiple fractional seconds, or multiple timezones) by the ISO parser. + * If a string cannot be parsed by the ISO parser, it then tries the java.time one. + * If there's lots of these strings, trying the ISO parser, then the java.time parser, might cause a performance drop. + * So provide a JVM option so that users can just use the java.time parsers, if they really need to. + */ + @UpdateForV9 // evaluate if we need to deprecate/remove this + private static final boolean JAVA_TIME_PARSERS_ONLY = Booleans.parseBoolean(System.getProperty("es.datetime.java_time_parsers"), false); + + static { + // when this is used directly in tests ES logging may not have been initialized yet + LoggerFactory logger; + if (JAVA_TIME_PARSERS_ONLY && (logger = LoggerFactory.provider()) != null) { + logger.getLogger(DateFormatters.class).info("Using java.time datetime parsers only"); + } + } + private static DateFormatter newDateFormatter(String format, DateTimeFormatter formatter) { return new JavaDateFormatter(format, new JavaTimeDateTimePrinter(formatter), new JavaTimeDateTimeParser(formatter)); } @@ -168,11 +190,18 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional. */ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME = newDateFormatter( - "strict_date_optional_time", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - STRICT_DATE_OPTIONAL_TIME_FORMATTER - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER); + + STRICT_DATE_OPTIONAL_TIME = new JavaDateFormatter( + "strict_date_optional_time", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS = new DateTimeFormatterBuilder().append( STRICT_YEAR_MONTH_DAY_FORMATTER @@ -224,51 +253,69 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution. 
*/ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = newDateFormatter( - "strict_date_optional_time_nanos", - STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); + + STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter( + "strict_date_optional_time_nanos", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { + new Iso8601DateTimeParser(Set.of(HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), true).withLocale(Locale.ROOT), + javaTimeParser } + ); + } /** * Returns a ISO 8601 compatible date time formatter and parser. * This is not fully compatible to the existing spec, which would require far more edge cases, but merely compatible with the * existing legacy joda time ISO date formatter */ - private static final DateFormatter ISO_8601 = newDateFormatter( - "iso8601", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() - .appendLiteral('T') - .optionalStart() - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 1, 9, true) - .optionalEnd() - .optionalStart() - .appendLiteral(",") - .appendFraction(NANO_OF_SECOND, 1, 9, false) - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalStart() - .appendZoneOrOffsetId() - .optionalEnd() - .optionalStart() - .append(TIME_ZONE_FORMATTER_NO_COLON) - .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT) - .withResolverStyle(ResolverStyle.STRICT) - ); + private static final DateFormatter ISO_8601; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser( + new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(",") + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_NO_COLON) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT) + ); + + ISO_8601 = new JavaDateFormatter( + "iso8601", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? 
new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } ///////////////////////////////////////// // diff --git a/server/src/main/java/org/elasticsearch/common/time/DateTime.java b/server/src/main/java/org/elasticsearch/common/time/DateTime.java new file mode 100644 index 0000000000000..101389b43d9fc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateTime.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.time.temporal.TemporalQuery; +import java.time.temporal.UnsupportedTemporalTypeException; + +/** + * Provides information on a parsed datetime + */ +record DateTime( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId, + ZoneOffset offset +) implements TemporalAccessor { + + @Override + @SuppressWarnings("unchecked") + public R query(TemporalQuery query) { + // shortcut a few queries used by DateFormatters.from + if (query == TemporalQueries.zoneId()) { + return (R) zoneId; + } + if (query == TemporalQueries.offset()) { + return (R) offset; + } + if (query == DateFormatters.LOCAL_DATE_QUERY || query == TemporalQueries.localDate()) { + if (months != null && days != null) { + return (R) LocalDate.of(years, months, days); + } + return null; + } + if (query == TemporalQueries.localTime()) { + if (hours != null && minutes != null && seconds != null) { + return (R) LocalTime.of(hours, minutes, seconds, nanos != null ? 
nanos : 0); + } + return null; + } + return TemporalAccessor.super.query(query); + } + + @Override + public boolean isSupported(TemporalField field) { + if (field instanceof ChronoField f) { + return switch (f) { + case YEAR -> true; + case MONTH_OF_YEAR -> months != null; + case DAY_OF_MONTH -> days != null; + case HOUR_OF_DAY -> hours != null; + case MINUTE_OF_HOUR -> minutes != null; + case SECOND_OF_MINUTE -> seconds != null; + case INSTANT_SECONDS -> months != null && days != null && hours != null && minutes != null && seconds != null; + // if the time components are there, we just default nanos to 0 if it's not present + case SECOND_OF_DAY, NANO_OF_SECOND, NANO_OF_DAY -> hours != null && minutes != null && seconds != null; + case OFFSET_SECONDS -> offset != null; + default -> false; + }; + } + + return field.isSupportedBy(this); + } + + @Override + public long getLong(TemporalField field) { + if (field instanceof ChronoField f) { + switch (f) { + case YEAR -> { + return years; + } + case MONTH_OF_YEAR -> { + return extractValue(f, months); + } + case DAY_OF_MONTH -> { + return extractValue(f, days); + } + case HOUR_OF_DAY -> { + return extractValue(f, hours); + } + case MINUTE_OF_HOUR -> { + return extractValue(f, minutes); + } + case SECOND_OF_MINUTE -> { + return extractValue(f, seconds); + } + case INSTANT_SECONDS -> { + if (isSupported(ChronoField.INSTANT_SECONDS) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalDateTime.of(years, months, days, hours, minutes, seconds) + .toEpochSecond(offset != null ? offset : ZoneOffset.UTC); + } + case SECOND_OF_DAY -> { + if (isSupported(ChronoField.SECOND_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds).toSecondOfDay(); + } + case NANO_OF_SECOND -> { + if (isSupported(ChronoField.NANO_OF_SECOND) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return nanos != null ? nanos.longValue() : 0L; + } + case NANO_OF_DAY -> { + if (isSupported(ChronoField.NANO_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds, nanos != null ? nanos : 0).toNanoOfDay(); + } + case OFFSET_SECONDS -> { + if (offset == null) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return offset.getTotalSeconds(); + } + default -> throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + } + + return field.getFrom(this); + } + + private static long extractValue(ChronoField field, Number value) { + if (value == null) { + throw new UnsupportedTemporalTypeException("No " + field + " value available"); + } + return value.longValue(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java new file mode 100644 index 0000000000000..2a526a36408ce --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import java.time.ZoneId; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +class Iso8601DateTimeParser implements DateTimeParser { + + private final Iso8601Parser parser; + private final ZoneId timezone; + // the locale doesn't actually matter, as we're parsing in a standardised format + // and we already account for . or , in decimals + private final Locale locale; + + Iso8601DateTimeParser(Set mandatoryFields, boolean optionalTime) { + parser = new Iso8601Parser(mandatoryFields, optionalTime, Map.of()); + timezone = null; + locale = null; + } + + private Iso8601DateTimeParser(Iso8601Parser parser, ZoneId timezone, Locale locale) { + this.parser = parser; + this.timezone = timezone; + this.locale = locale; + } + + @Override + public ZoneId getZone() { + return timezone; + } + + @Override + public Locale getLocale() { + return locale; + } + + @Override + public DateTimeParser withZone(ZoneId zone) { + return new Iso8601DateTimeParser(parser, zone, locale); + } + + @Override + public DateTimeParser withLocale(Locale locale) { + return new Iso8601DateTimeParser(parser, timezone, locale); + } + + Iso8601DateTimeParser withDefaults(Map defaults) { + return new Iso8601DateTimeParser(new Iso8601Parser(parser.mandatoryFields(), parser.optionalTime(), defaults), timezone, locale); + } + + @Override + public TemporalAccessor parse(CharSequence str) { + var result = parser.tryParse(str, timezone); + var temporal = result.result(); + if (temporal == null) { + throw new DateTimeParseException("Could not fully parse datetime", str, result.errorIndex()); + } + return temporal; + } + + @Override + public Optional tryParse(CharSequence str) { + return Optional.ofNullable(parser.tryParse(str, timezone).result()); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java new file mode 100644 index 0000000000000..4f1d131dd8ced --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java @@ -0,0 +1,521 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.core.Nullable; + +import java.time.DateTimeException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.util.EnumMap; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; + +/** + * Parses datetimes in ISO8601 format (and subsequences thereof). + *
<p>
+ * This is faster than the generic parsing in {@link java.time.format.DateTimeFormatter}, as this is hard-coded and specific to ISO-8601.
+ * Various public libraries provide their own variant of this mechanism. We use our own for a few reasons:
+ * <ul>
+ *     <li>
+ *         We are historically a bit more lenient with strings that are invalid according to the strict specification
+ *         (eg using a zone region instead of offset for timezone)
+ *     </li>
+ *     <li>Various built-in formats specify some fields as mandatory and some as optional</li>
+ *     <li>Callers can specify defaults for fields that are not present (eg for roundup parsers)</li>
+ * </ul>
+ * We also do not use exceptions here, instead returning {@code null} for any invalid values, that are then + * checked and propagated as appropriate. + */ +class Iso8601Parser { + + /** + * The result of the parse. If successful, {@code result} will be non-null. + * If parse failed, {@code errorIndex} specifies the index into the parsed string + * that the first invalid data was encountered. + */ + record Result(@Nullable DateTime result, int errorIndex) { + Result(DateTime result) { + this(result, -1); + } + + static Result error(int errorIndex) { + return new Result(null, errorIndex); + } + } + + private static final Set VALID_MANDATORY_FIELDS = EnumSet.of( + ChronoField.YEAR, + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE + ); + + private static final Set VALID_DEFAULT_FIELDS = EnumSet.of( + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE, + ChronoField.NANO_OF_SECOND + ); + + private final Set mandatoryFields; + private final boolean optionalTime; + private final Map defaults; + + /** + * Constructs a new {@code Iso8601Parser} object + * + * @param mandatoryFields + * The set of fields that must be present for a valid parse. These should be specified in field order + * (eg if {@link ChronoField#DAY_OF_MONTH} is specified, {@link ChronoField#MONTH_OF_YEAR} should also be specified). + * {@link ChronoField#YEAR} is always mandatory. + * @param optionalTime + * {@code false} if the presence of time fields follows {@code mandatoryFields}, + * {@code true} if a time component is always optional, despite the presence of time fields in {@code mandatoryFields}. + * This makes it possible to specify 'time is optional, but if it is present, it must have these fields' + * by settings {@code optionalTime = true} and putting time fields such as {@link ChronoField#HOUR_OF_DAY} + * and {@link ChronoField#MINUTE_OF_HOUR} in {@code mandatoryFields}. + * @param defaults + * Map of default field values, if they are not present in the parsed string. + */ + Iso8601Parser(Set mandatoryFields, boolean optionalTime, Map defaults) { + checkChronoFields(mandatoryFields, VALID_MANDATORY_FIELDS); + checkChronoFields(defaults.keySet(), VALID_DEFAULT_FIELDS); + + this.mandatoryFields = EnumSet.of(ChronoField.YEAR); // year is always mandatory + this.mandatoryFields.addAll(mandatoryFields); + this.optionalTime = optionalTime; + this.defaults = defaults.isEmpty() ? Map.of() : new EnumMap<>(defaults); + } + + private static void checkChronoFields(Set fields, Set validFields) { + if (fields.isEmpty()) return; // nothing to check + + fields = EnumSet.copyOf(fields); + fields.removeAll(validFields); + if (fields.isEmpty() == false) { + throw new IllegalArgumentException("Invalid chrono fields specified " + fields); + } + } + + boolean optionalTime() { + return optionalTime; + } + + Set mandatoryFields() { + return mandatoryFields; + } + + private boolean isOptional(ChronoField field) { + return mandatoryFields.contains(field) == false; + } + + private Integer defaultZero(ChronoField field) { + return defaults.getOrDefault(field, 0); + } + + /** + * Attempts to parse {@code str} as an ISO-8601 datetime, returning a {@link Result} indicating if the parse + * was successful or not, and what fields were present. 
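A minimal sketch of how the mandatory-field and optional-time semantics combine, assuming a caller in the same package (the class and its Result type are package-private) and the hypothetical inputs below:

    package org.elasticsearch.common.time;

    import java.time.temporal.ChronoField;
    import java.util.EnumSet;
    import java.util.Map;

    class Iso8601ParserSketch {
        public static void main(String[] args) {
            // 'time is optional, but if it is present, it must include hours and minutes'
            Iso8601Parser parser = new Iso8601Parser(
                EnumSet.of(ChronoField.MONTH_OF_YEAR, ChronoField.DAY_OF_MONTH, ChronoField.HOUR_OF_DAY, ChronoField.MINUTE_OF_HOUR),
                true,
                Map.of()
            );
            System.out.println(parser.tryParse("2023-05-01", null).result());        // date-only parses; time is optional
            System.out.println(parser.tryParse("2023-05-01T12:30", null).result());  // time with hours and minutes parses
            System.out.println(parser.tryParse("2023-05-01T12", null).errorIndex()); // prints 13: minutes are mandatory once time appears
        }
    }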
+ * @param str The string to parse + * @param defaultTimezone The default timezone to return, if no timezone is present in the string + * @return The {@link Result} of the parse. + */ + Result tryParse(CharSequence str, @Nullable ZoneId defaultTimezone) { + if (str.charAt(0) == '-') { + // the year is negative. This is most unusual. + // Instead of always adding offsets and dynamically calculating position in the main parser code below, + // just in case it starts with a -, just parse the substring, then adjust the output appropriately + Result result = parse(new CharSubSequence(str, 1, str.length()), defaultTimezone); + + if (result.errorIndex() >= 0) { + return Result.error(result.errorIndex() + 1); + } else { + DateTime dt = result.result(); + return new Result( + new DateTime( + -dt.years(), + dt.months(), + dt.days(), + dt.hours(), + dt.minutes(), + dt.seconds(), + dt.nanos(), + dt.zoneId(), + dt.offset() + ) + ); + } + } else { + return parse(str, defaultTimezone); + } + } + + /** + * Index {@code i} is the multiplicand to get the number of nanos from the fractional second with {@code i=9-d} digits. + */ + private static final int[] NANO_MULTIPLICANDS = new int[] { 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000 }; + + /** + * Parses {@code str} in ISO8601 format. + *
<p>
+ * This parses the string using fixed offsets (it does not support variable-width fields) and separators, + * sequentially parsing each field and looking for the correct separator. + * This enables it to be very fast, as all the fields are in fixed places in the string. + * The only variable aspect comes from the timezone, which (fortunately) is only present at the end of the string, + * at any point after a time field. + * It also does not use exceptions, instead returning {@code null} where a value cannot be parsed. + */ + private Result parse(CharSequence str, @Nullable ZoneId defaultTimezone) { + int len = str.length(); + + // YEARS + Integer years = parseInt(str, 0, 4); + if (years == null) return Result.error(0); + if (len == 4) { + return isOptional(ChronoField.MONTH_OF_YEAR) + ? new Result( + withZoneOffset( + years, + defaults.get(ChronoField.MONTH_OF_YEAR), + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(4); + } + + if (str.charAt(4) != '-') return Result.error(4); + + // MONTHS + Integer months = parseInt(str, 5, 7); + if (months == null || months > 12) return Result.error(5); + if (len == 7) { + return isOptional(ChronoField.DAY_OF_MONTH) + ? new Result( + withZoneOffset( + years, + months, + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(7); + } + + if (str.charAt(7) != '-') return Result.error(7); + + // DAYS + Integer days = parseInt(str, 8, 10); + if (days == null || days > 31) return Result.error(8); + if (len == 10) { + return optionalTime || isOptional(ChronoField.HOUR_OF_DAY) + ? new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(10); + } + + if (str.charAt(10) != 'T') return Result.error(10); + if (len == 11) { + return isOptional(ChronoField.HOUR_OF_DAY) + ? new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(11); + } + + // HOURS + timezone + Integer hours = parseInt(str, 11, 13); + if (hours == null || hours > 23) return Result.error(11); + if (len == 13) { + return isOptional(ChronoField.MINUTE_OF_HOUR) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(13); + } + if (isZoneId(str, 13)) { + ZoneId timezone = parseZoneId(str, 13); + return timezone != null && isOptional(ChronoField.MINUTE_OF_HOUR) + ? 
new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(13); + } + + if (str.charAt(13) != ':') return Result.error(13); + + // MINUTES + timezone + Integer minutes = parseInt(str, 14, 16); + if (minutes == null || minutes > 59) return Result.error(14); + if (len == 16) { + return isOptional(ChronoField.SECOND_OF_MINUTE) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(16); + } + if (isZoneId(str, 16)) { + ZoneId timezone = parseZoneId(str, 16); + return timezone != null && isOptional(ChronoField.SECOND_OF_MINUTE) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(16); + } + + if (str.charAt(16) != ':') return Result.error(16); + + // SECONDS + timezone + Integer seconds = parseInt(str, 17, 19); + if (seconds == null || seconds > 59) return Result.error(17); + if (len == 19) { + return new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), defaultTimezone) + ); + } + if (isZoneId(str, 19)) { + ZoneId timezone = parseZoneId(str, 19); + return timezone != null + ? new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), timezone) + ) + : Result.error(19); + } + + char decSeparator = str.charAt(19); + if (decSeparator != '.' && decSeparator != ',') return Result.error(19); + + // NANOS + timezone + // nanos are always optional + // the last number could be millis or nanos, or any combination in the middle + // so we keep parsing numbers until we get to not a number + int nanos = 0; + int pos; + for (pos = 20; pos < len && pos < 29; pos++) { + char c = str.charAt(pos); + if (c < ZERO || c > NINE) break; + nanos = nanos * 10 + (c - ZERO); + } + + if (pos == 20) return Result.error(20); // didn't find a number at all + + // multiply it by the correct multiplicand to get the nanos + nanos *= NANO_MULTIPLICANDS[29 - pos]; + + if (len == pos) { + return new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, defaultTimezone)); + } + if (isZoneId(str, pos)) { + ZoneId timezone = parseZoneId(str, pos); + return timezone != null + ? new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, timezone)) + : Result.error(pos); + } + + // still chars left at the end - string is not valid + return Result.error(pos); + } + + private static boolean isZoneId(CharSequence str, int pos) { + // all region zoneIds must start with [A-Za-z] (see ZoneId#of) + // this also covers Z and UT/UTC/GMT zone variants + char c = str.charAt(pos); + return c == '+' || c == '-' || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'); + } + + /** + * This parses the zone offset, which is of the format accepted by {@link java.time.ZoneId#of(String)}. + * It has fast paths for numerical offsets, but falls back on {@code ZoneId.of} for non-trivial zone ids. 
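For the fast path, a minimal reference of the numeric offset shapes it is meant to match, checked against java.time (region ids such as Europe/Oslo fall through to ZoneId.of):

    import java.time.ZoneOffset;

    class OffsetShapeSketch {
        public static void main(String[] args) {
            // hours; hours+minutes with and without a colon; hours+minutes+seconds
            for (String s : new String[] { "+01", "+0130", "+01:30", "-05:00:30" }) {
                System.out.println(s + " -> " + ZoneOffset.of(s).getTotalSeconds() + "s");
            }
        }
    }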
+ */ + private ZoneId parseZoneId(CharSequence str, int pos) { + int len = str.length(); + char first = str.charAt(pos); + + if (first == 'Z' && len == pos + 1) { + return ZoneOffset.UTC; + } + + boolean positive; + switch (first) { + case '+' -> positive = true; + case '-' -> positive = false; + default -> { + // non-trivial zone offset, fallback on the built-in java zoneid parser + try { + return ZoneId.of(str.subSequence(pos, str.length()).toString()); + } catch (DateTimeException e) { + return null; + } + } + } + pos++; // read the + or - + + Integer hours = parseInt(str, pos, pos += 2); + if (hours == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, 0, 0, positive); + + boolean hasColon = false; + if (str.charAt(pos) == ':') { + pos++; + hasColon = true; + } + + Integer minutes = parseInt(str, pos, pos += 2); + if (minutes == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, 0, positive); + + // either both dividers have a colon, or neither do + if ((str.charAt(pos) == ':') != hasColon) return null; + if (hasColon) { + pos++; + } + + Integer seconds = parseInt(str, pos, pos += 2); + if (seconds == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, seconds, positive); + + // there's some text left over... + return null; + } + + /* + * ZoneOffset.ofTotalSeconds has a ConcurrentHashMap cache of offsets. This is fine, + * but it does mean there's an expensive map lookup every time we call ofTotalSeconds. + * There's no way to get round that, but we can at least have a very quick last-value cache here + * to avoid doing a full map lookup when there's lots of timestamps with the same offset being parsed + */ + private final ThreadLocal<ZoneOffset> lastOffset = ThreadLocal.withInitial(() -> ZoneOffset.UTC); + + private ZoneOffset ofHoursMinutesSeconds(int hours, int minutes, int seconds, boolean positive) { + int totalSeconds = hours * 3600 + minutes * 60 + seconds; + if (positive == false) { + totalSeconds = -totalSeconds; + } + + // check the lastOffset value + ZoneOffset lastOffset = this.lastOffset.get(); + if (totalSeconds == lastOffset.getTotalSeconds()) { + return lastOffset; + } + + try { + ZoneOffset offset = ZoneOffset.ofTotalSeconds(totalSeconds); + this.lastOffset.set(offset); + return offset; + } catch (DateTimeException e) { + // zoneoffset is out of range + return null; + } + } + + /** + * Create a {@code DateTime} object, with the ZoneOffset field set when the zone is an offset, not just an id.
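A ZoneOffset is itself a ZoneId, so this instanceof distinction is all that separates fixed offsets (which can later feed INSTANT_SECONDS directly) from region ids whose offset depends on the instant; a minimal illustration:

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    class ZoneKindSketch {
        public static void main(String[] args) {
            ZoneId fixed = ZoneId.of("+02:00");        // ZoneId.of returns a ZoneOffset here
            ZoneId region = ZoneId.of("Europe/Oslo");  // region id: offset varies with the instant
            System.out.println(fixed instanceof ZoneOffset);  // true
            System.out.println(region instanceof ZoneOffset); // false
        }
    }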
+ */ + private static DateTime withZoneOffset( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId + ) { + if (zoneId instanceof ZoneOffset zo) { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, zo); + } else { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, null); + } + } + + private static final char ZERO = '0'; + private static final char NINE = '9'; + + private static Integer parseInt(CharSequence str, int startInclusive, int endExclusive) { + if (str.length() < endExclusive) return null; + + int result = 0; + for (int i = startInclusive; i < endExclusive; i++) { + char c = str.charAt(i); + if (c < ZERO || c > NINE) return null; + result = result * 10 + (c - ZERO); + } + return result; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index 9c39ee51276d7..707b07c1d68d9 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -21,15 +21,21 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.function.UnaryOperator; +import static java.util.Map.entry; + class JavaDateFormatter implements DateFormatter { @SuppressWarnings("unchecked") private static T defaultRoundUp(T parser) { if (parser instanceof JavaTimeDateTimeParser jtp) { return (T) defaultRoundUp(jtp); } + if (parser instanceof Iso8601DateTimeParser iso) { + return (T) defaultRoundUp(iso); + } throw new IllegalArgumentException("Unknown parser implementation " + parser.getClass()); } @@ -78,6 +84,19 @@ private static JavaTimeDateTimeParser defaultRoundUp(JavaTimeDateTimeParser pars return new JavaTimeDateTimeParser(builder.toFormatter(parser.getLocale())); } + private static Iso8601DateTimeParser defaultRoundUp(Iso8601DateTimeParser parser) { + return parser.withDefaults( + Map.ofEntries( + entry(ChronoField.MONTH_OF_YEAR, 1), + entry(ChronoField.DAY_OF_MONTH, 1), + entry(ChronoField.HOUR_OF_DAY, 23), + entry(ChronoField.MINUTE_OF_HOUR, 59), + entry(ChronoField.SECOND_OF_MINUTE, 59), + entry(ChronoField.NANO_OF_SECOND, 999_999_999) + ) + ); + } + private final String format; private final DateTimePrinter printer; private final DateTimeParser[] parsers; diff --git a/server/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java b/server/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java index 4dfb3d1f46e25..a1b8690e7ea66 100644 --- a/server/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/AbstractBigArray.java @@ -19,7 +19,7 @@ /** Common implementation for array lists that slice data into fixed-size blocks. 
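Taking the round-up defaults above literally, a missing month or day falls back to 1 while missing time components fill with their maximum values, so a date-only input resolves to the end of that day; a minimal sketch of the equivalent value:

    import java.time.LocalDateTime;

    class RoundUpDefaultsSketch {
        public static void main(String[] args) {
            // "2023-05-10" parsed with the round-up defaults is equivalent to:
            System.out.println(LocalDateTime.of(2023, 5, 10, 23, 59, 59, 999_999_999));
            // prints 2023-05-10T23:59:59.999999999
        }
    }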
*/ abstract class AbstractBigArray extends AbstractArray { - private final PageCacheRecycler recycler; + protected final PageCacheRecycler recycler; private Recycler.V[] cache; private final int pageShift; @@ -93,7 +93,7 @@ private static T[] grow(T[] array, int minSize) { return array; } - private T registerNewPage(Recycler.V v, int page, int expectedSize) { + protected T registerNewPage(Recycler.V v, int page, int expectedSize) { cache = grow(cache, page + 1); assert cache[page] == null; cache[page] = v; @@ -101,24 +101,6 @@ private T registerNewPage(Recycler.V v, int page, int expectedSize) { return v.v(); } - protected final byte[] newBytePage(int page) { - if (recycler != null) { - final Recycler.V v = recycler.bytePage(clearOnResize); - return registerNewPage(v, page, PageCacheRecycler.BYTE_PAGE_SIZE); - } else { - return new byte[PageCacheRecycler.BYTE_PAGE_SIZE]; - } - } - - protected final Object[] newObjectPage(int page) { - if (recycler != null) { - final Recycler.V v = recycler.objectPage(); - return registerNewPage(v, page, PageCacheRecycler.OBJECT_PAGE_SIZE); - } else { - return new Object[PageCacheRecycler.OBJECT_PAGE_SIZE]; - } - } - protected final void releasePage(int page) { if (recycler != null) { cache[page].close(); @@ -134,38 +116,4 @@ protected final void doClose() { } } - /** - * Fills an array with a value by copying it to itself, increasing copy ranges in each iteration - */ - protected static final void fillBySelfCopy(byte[] page, int fromBytes, int toBytes, int initialCopyBytes) { - for (int pos = fromBytes + initialCopyBytes; pos < toBytes;) { - int sourceBytesLength = pos - fromBytes; // source bytes available to be copied - int copyBytesLength = Math.min(sourceBytesLength, toBytes - pos); // number of bytes to actually copy - System.arraycopy(page, fromBytes, page, pos, copyBytesLength); - pos += copyBytesLength; - } - } - - /** - * Bulk copies array to paged array - */ - public void set(long index, byte[] buf, int offset, int len, byte[][] pages, int shift) { - assert index + len <= size(); - int pageIndex = pageIndex(index); - final int indexInPage = indexInPage(index); - if (indexInPage + len <= pageSize()) { - System.arraycopy(buf, offset << shift, pages[pageIndex], indexInPage << shift, len << shift); - } else { - int copyLen = pageSize() - indexInPage; - System.arraycopy(buf, offset << shift, pages[pageIndex], indexInPage, copyLen << shift); - do { - ++pageIndex; - offset += copyLen; - len -= copyLen; - copyLen = Math.min(len, pageSize()); - System.arraycopy(buf, offset << shift, pages[pageIndex], 0, copyLen << shift); - } while (len > copyLen); - } - } - } diff --git a/server/src/main/java/org/elasticsearch/common/util/AbstractBigByteArray.java b/server/src/main/java/org/elasticsearch/common/util/AbstractBigByteArray.java new file mode 100644 index 0000000000000..06224ba605af0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/AbstractBigByteArray.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.common.recycler.Recycler; + +abstract class AbstractBigByteArray extends AbstractBigArray { + + protected byte[][] pages; + + protected AbstractBigByteArray(int pageSize, BigArrays bigArrays, boolean clearOnResize, long size) { + super(pageSize, bigArrays, clearOnResize); + this.size = size; + pages = new byte[numPages(size)][]; + for (int i = 0; i < pages.length; ++i) { + pages[i] = newBytePage(i); + } + } + + protected final byte[] newBytePage(int page) { + if (recycler != null) { + final Recycler.V v = recycler.bytePage(clearOnResize); + return registerNewPage(v, page, PageCacheRecycler.BYTE_PAGE_SIZE); + } else { + return new byte[PageCacheRecycler.BYTE_PAGE_SIZE]; + } + } + + /** + * Fills an array with a value by copying it to itself, increasing copy ranges in each iteration + */ + protected static void fillBySelfCopy(byte[] page, int fromBytes, int toBytes, int initialCopyBytes) { + for (int pos = fromBytes + initialCopyBytes; pos < toBytes;) { + int sourceBytesLength = pos - fromBytes; // source bytes available to be copied + int copyBytesLength = Math.min(sourceBytesLength, toBytes - pos); // number of bytes to actually copy + System.arraycopy(page, fromBytes, page, pos, copyBytesLength); + pos += copyBytesLength; + } + } + + /** + * Bulk copies array to paged array + */ + protected void set(long index, byte[] buf, int offset, int len, byte[][] pages, int shift) { + assert index + len <= size(); + int pageIndex = pageIndex(index); + final int indexInPage = indexInPage(index); + if (indexInPage + len <= pageSize()) { + System.arraycopy(buf, offset << shift, pages[pageIndex], indexInPage << shift, len << shift); + } else { + int copyLen = pageSize() - indexInPage; + System.arraycopy(buf, offset << shift, pages[pageIndex], indexInPage, copyLen << shift); + do { + ++pageIndex; + offset += copyLen; + len -= copyLen; + copyLen = Math.min(len, pageSize()); + System.arraycopy(buf, offset << shift, pages[pageIndex], 0, copyLen << shift); + } while (len > copyLen); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java b/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java index 379c714b2d355..6d870fb6e6b83 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigByteArray.java @@ -26,20 +26,13 @@ * Byte array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of * configurable length. */ -final class BigByteArray extends AbstractBigArray implements ByteArray { +final class BigByteArray extends AbstractBigByteArray implements ByteArray { private static final BigByteArray ESTIMATOR = new BigByteArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); - private byte[][] pages; - /** Constructor. 
*/ BigByteArray(long size, BigArrays bigArrays, boolean clearOnResize) { - super(BYTE_PAGE_SIZE, bigArrays, clearOnResize); - this.size = size; - pages = new byte[numPages(size)][]; - for (int i = 0; i < pages.length; ++i) { - pages[i] = newBytePage(i); - } + super(BYTE_PAGE_SIZE, bigArrays, clearOnResize, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java b/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java index 041852cf08560..4b1c9e374b51a 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigDoubleArray.java @@ -27,22 +27,15 @@ * Double array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of * configurable length. */ -final class BigDoubleArray extends AbstractBigArray implements DoubleArray { +final class BigDoubleArray extends AbstractBigByteArray implements DoubleArray { private static final BigDoubleArray ESTIMATOR = new BigDoubleArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); static final VarHandle VH_PLATFORM_NATIVE_DOUBLE = MethodHandles.byteArrayViewVarHandle(double[].class, ByteOrder.nativeOrder()); - private byte[][] pages; - /** Constructor. */ BigDoubleArray(long size, BigArrays bigArrays, boolean clearOnResize) { - super(DOUBLE_PAGE_SIZE, bigArrays, clearOnResize); - this.size = size; - pages = new byte[numPages(size)][]; - for (int i = 0; i < pages.length; ++i) { - pages[i] = newBytePage(i); - } + super(DOUBLE_PAGE_SIZE, bigArrays, clearOnResize, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java b/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java index 793d071bef54e..b537cec79295d 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigFloatArray.java @@ -22,22 +22,15 @@ * Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of * configurable length. */ -final class BigFloatArray extends AbstractBigArray implements FloatArray { +final class BigFloatArray extends AbstractBigByteArray implements FloatArray { private static final BigFloatArray ESTIMATOR = new BigFloatArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); static final VarHandle VH_PLATFORM_NATIVE_FLOAT = MethodHandles.byteArrayViewVarHandle(float[].class, ByteOrder.nativeOrder()); - private byte[][] pages; - /** Constructor. */ BigFloatArray(long size, BigArrays bigArrays, boolean clearOnResize) { - super(FLOAT_PAGE_SIZE, bigArrays, clearOnResize); - this.size = size; - pages = new byte[numPages(size)][]; - for (int i = 0; i < pages.length; ++i) { - pages[i] = newBytePage(i); - } + super(FLOAT_PAGE_SIZE, bigArrays, clearOnResize, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java b/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java index f12293ab41ae8..27cc254eade78 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigIntArray.java @@ -27,21 +27,14 @@ * Int array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of * configurable length. 
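A minimal sketch of the storage scheme the Big*Array classes share after this refactor: primitive values live in byte[] pages and are read and written through a byte-array view VarHandle in native byte order, with indices scaled to byte offsets (the role of the shift parameter in the bulk set above):

    import java.lang.invoke.MethodHandles;
    import java.lang.invoke.VarHandle;
    import java.nio.ByteOrder;

    class VarHandlePageSketch {
        static final VarHandle DOUBLE_VIEW = MethodHandles.byteArrayViewVarHandle(double[].class, ByteOrder.nativeOrder());

        public static void main(String[] args) {
            byte[] page = new byte[16];                  // room for two doubles
            DOUBLE_VIEW.set(page, 1 << 3, 42.5);         // slot 1 lives at byte offset 1 << 3 = 8
            double v = (double) DOUBLE_VIEW.get(page, 1 << 3);
            System.out.println(v);                       // 42.5
        }
    }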
*/ -final class BigIntArray extends AbstractBigArray implements IntArray { +final class BigIntArray extends AbstractBigByteArray implements IntArray { private static final BigIntArray ESTIMATOR = new BigIntArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); static final VarHandle VH_PLATFORM_NATIVE_INT = MethodHandles.byteArrayViewVarHandle(int[].class, ByteOrder.nativeOrder()); - private byte[][] pages; - /** Constructor. */ BigIntArray(long size, BigArrays bigArrays, boolean clearOnResize) { - super(INT_PAGE_SIZE, bigArrays, clearOnResize); - this.size = size; - pages = new byte[numPages(size)][]; - for (int i = 0; i < pages.length; ++i) { - pages[i] = newBytePage(i); - } + super(INT_PAGE_SIZE, bigArrays, clearOnResize, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java b/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java index d39ef7a7841f9..4e7300e84cbe4 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigLongArray.java @@ -25,22 +25,15 @@ * Long array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of * configurable length. */ -final class BigLongArray extends AbstractBigArray implements LongArray { +final class BigLongArray extends AbstractBigByteArray implements LongArray { private static final BigLongArray ESTIMATOR = new BigLongArray(0, BigArrays.NON_RECYCLING_INSTANCE, false); static final VarHandle VH_PLATFORM_NATIVE_LONG = MethodHandles.byteArrayViewVarHandle(long[].class, ByteOrder.nativeOrder()); - private byte[][] pages; - /** Constructor. */ BigLongArray(long size, BigArrays bigArrays, boolean clearOnResize) { - super(LONG_PAGE_SIZE, bigArrays, clearOnResize); - this.size = size; - pages = new byte[numPages(size)][]; - for (int i = 0; i < pages.length; ++i) { - pages[i] = newBytePage(i); - } + super(LONG_PAGE_SIZE, bigArrays, clearOnResize, size); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index 0c4a2894698b6..9883df316853c 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.recycler.Recycler; import java.util.Arrays; @@ -81,4 +82,12 @@ public static long estimateRamBytes(final long size) { return ESTIMATOR.ramBytesEstimated(size); } + private Object[] newObjectPage(int page) { + if (recycler != null) { + final Recycler.V v = recycler.objectPage(); + return registerNewPage(v, page, PageCacheRecycler.OBJECT_PAGE_SIZE); + } else { + return new Object[PageCacheRecycler.OBJECT_PAGE_SIZE]; + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 06a5e13a208be..ff8db4bacef8c 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -44,6 +44,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexEventListener; @@ -177,6 +178,7 @@ public interface DirectoryWrapper { private final BooleanSupplier allowExpensiveQueries; private final Map recoveryStateFactories; private final SetOnce indexCommitListener = new SetOnce<>(); + private final MapperMetrics mapperMetrics; /** * Construct the index module for the index with the specified index settings. The index module contains extension points for plugins @@ -195,7 +197,8 @@ public IndexModule( final BooleanSupplier allowExpensiveQueries, final IndexNameExpressionResolver expressionResolver, final Map recoveryStateFactories, - final SlowLogFieldProvider slowLogFieldProvider + final SlowLogFieldProvider slowLogFieldProvider, + final MapperMetrics mapperMetrics ) { this.indexSettings = indexSettings; this.analysisRegistry = analysisRegistry; @@ -206,6 +209,7 @@ public IndexModule( this.allowExpensiveQueries = allowExpensiveQueries; this.expressionResolver = expressionResolver; this.recoveryStateFactories = recoveryStateFactories; + this.mapperMetrics = mapperMetrics; } /** @@ -536,7 +540,8 @@ public IndexService newIndexService( recoveryStateFactory, indexFoldersDeletionListener, snapshotCommitSupplier, - indexCommitListener.get() + indexCommitListener.get(), + mapperMetrics ); success = true; return indexService; @@ -646,7 +651,8 @@ public MapperService newIndexMapperService( throw new UnsupportedOperationException("no index query shard context available"); }, indexSettings.getMode().idFieldMapperWithoutFieldData(), - scriptService + scriptService, + mapperMetrics ); } diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 88db674c3ec2f..1712f824a132c 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsAccounting; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; @@ -159,6 +160,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final IndexNameExpressionResolver expressionResolver; private final Supplier indexSortSupplier; private final ValuesSourceRegistry valuesSourceRegistry; + private final MapperMetrics mapperMetrics; @SuppressWarnings("this-escape") public IndexService( @@ -192,7 +194,8 @@ public IndexService( IndexStorePlugin.RecoveryStateFactory recoveryStateFactory, IndexStorePlugin.IndexFoldersDeletionListener indexFoldersDeletionListener, IndexStorePlugin.SnapshotCommitSupplier snapshotCommitSupplier, - Engine.IndexCommitListener indexCommitListener + Engine.IndexCommitListener indexCommitListener, + MapperMetrics mapperMetrics ) { super(indexSettings); assert indexCreationContext != IndexCreationContext.RELOAD_ANALYZERS @@ -219,7 +222,8 @@ public IndexService( // we parse all percolator queries as they would be parsed on shard 0 () -> newSearchExecutionContext(0, 0, null, System::currentTimeMillis, null, emptyMap()), idFieldMapper, - scriptService + scriptService, + mapperMetrics ); this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, 
circuitBreakerService); if (indexSettings.getIndexSortConfig().hasIndexSort()) { @@ -264,6 +268,7 @@ public IndexService( this.searchOperationListeners = Collections.unmodifiableList(searchOperationListeners); this.indexingOperationListeners = Collections.unmodifiableList(indexingOperationListeners); this.indexCommitListener = indexCommitListener; + this.mapperMetrics = mapperMetrics; try (var ignored = threadPool.getThreadContext().clearTraceContext()) { // kick off async ops for the first shard in this index this.refreshTask = new AsyncRefreshTask(this); @@ -544,7 +549,8 @@ public synchronized IndexShard createShard( circuitBreakerService, snapshotCommitSupplier, System::nanoTime, - indexCommitListener + indexCommitListener, + mapperMetrics ); eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created"); eventListener.afterIndexShardCreated(indexShard); @@ -742,7 +748,8 @@ public SearchExecutionContext newSearchExecutionContext( allowExpensiveQueries, valuesSourceRegistry, runtimeMappings, - requestSize + requestSize, + mapperMetrics ); } diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 3e191d0ab1e25..0c28601646ac3 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -27,6 +27,7 @@ import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.RoutingFieldMapper; @@ -59,11 +60,13 @@ public final class ShardGetService extends AbstractIndexShardComponent { private final MeanMetric missingMetric = new MeanMetric(); private final CounterMetric currentMetric = new CounterMetric(); private final IndexShard indexShard; + private final MapperMetrics mapperMetrics; - public ShardGetService(IndexSettings indexSettings, IndexShard indexShard, MapperService mapperService) { + public ShardGetService(IndexSettings indexSettings, IndexShard indexShard, MapperService mapperService, MapperMetrics mapperMetrics) { super(indexShard.shardId(), indexSettings); this.mapperService = mapperService; this.indexShard = indexShard; + this.mapperMetrics = mapperMetrics; } public GetStats stats() { @@ -303,8 +306,8 @@ private GetResult innerGetFetch( Map metadataFields = null; DocIdAndVersion docIdAndVersion = get.docIdAndVersion(); SourceLoader loader = forceSyntheticSource - ? new SourceLoader.Synthetic(mappingLookup.getMapping()) - : mappingLookup.newSourceLoader(); + ? 
new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) + : mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); StoredFieldLoader storedFieldLoader = buildStoredFieldLoader(storedFields, fetchSourceContext, loader); LeafStoredFieldLoader leafStoredFieldLoader = storedFieldLoader.getLoader(docIdAndVersion.reader.getContext(), null); try { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 9b3496acfd9f3..1b07d93295fe1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -20,6 +20,7 @@ public class DocumentMapper { private final CompressedXContent mappingSource; private final MappingLookup mappingLookup; private final DocumentParser documentParser; + private final MapperMetrics mapperMetrics; /** * Create a new {@link DocumentMapper} that holds empty mappings. @@ -32,14 +33,27 @@ public static DocumentMapper createEmpty(MapperService mapperService) { ); MetadataFieldMapper[] metadata = mapperService.getMetadataMappers().values().toArray(new MetadataFieldMapper[0]); Mapping mapping = new Mapping(root, metadata, null); - return new DocumentMapper(mapperService.documentParser(), mapping, mapping.toCompressedXContent(), IndexVersion.current()); + return new DocumentMapper( + mapperService.documentParser(), + mapping, + mapping.toCompressedXContent(), + IndexVersion.current(), + mapperService.getMapperMetrics() + ); } - DocumentMapper(DocumentParser documentParser, Mapping mapping, CompressedXContent source, IndexVersion version) { + DocumentMapper( + DocumentParser documentParser, + Mapping mapping, + CompressedXContent source, + IndexVersion version, + MapperMetrics mapperMetrics + ) { this.documentParser = documentParser; this.type = mapping.getRoot().name(); this.mappingLookup = MappingLookup.fromMapping(mapping); this.mappingSource = source; + this.mapperMetrics = mapperMetrics; assert mapping.toCompressedXContent().equals(source) || isSyntheticSourceMalformed(source, version) : "provided source [" + source + "] differs from mapping [" + mapping.toCompressedXContent() + "]"; @@ -112,7 +126,7 @@ public void validate(IndexSettings settings, boolean checkLimits) { * Build an empty source loader to validate that the mapping is compatible * with the source loading strategy declared on the source field mapper. */ - sourceMapper().newSourceLoader(mapping()); + sourceMapper().newSourceLoader(mapping(), mapperMetrics.sourceFieldMetrics()); if (settings.getIndexSortConfig().hasIndexSort() && mappers().nestedLookup() != NestedLookup.EMPTY) { throw new IllegalArgumentException("cannot have nested fields when index sort is activated"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java new file mode 100644 index 0000000000000..a0dc28a25d3da --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperMetrics.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +/** + * Groups together all metrics used in mappers. + * Main purpose of this class is to avoid verbosity of passing individual metric instances around. + */ +public record MapperMetrics(SourceFieldMetrics sourceFieldMetrics) { + public static MapperMetrics NOOP = new MapperMetrics(SourceFieldMetrics.NOOP); +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index f91c4f176c6da..19f8da0954c5d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -152,6 +152,7 @@ public boolean isAutoUpdate() { private final IndexVersion indexVersionCreated; private final MapperRegistry mapperRegistry; private final Supplier mappingParserContextSupplier; + private final MapperMetrics mapperMetrics; private volatile DocumentMapper mapper; private volatile long mappingVersion; @@ -165,7 +166,8 @@ public MapperService( MapperRegistry mapperRegistry, Supplier searchExecutionContextSupplier, IdFieldMapper idFieldMapper, - ScriptCompiler scriptCompiler + ScriptCompiler scriptCompiler, + MapperMetrics mapperMetrics ) { this( () -> clusterService.state().getMinTransportVersion(), @@ -176,7 +178,8 @@ public MapperService( mapperRegistry, searchExecutionContextSupplier, idFieldMapper, - scriptCompiler + scriptCompiler, + mapperMetrics ); } @@ -190,7 +193,8 @@ public MapperService( MapperRegistry mapperRegistry, Supplier searchExecutionContextSupplier, IdFieldMapper idFieldMapper, - ScriptCompiler scriptCompiler + ScriptCompiler scriptCompiler, + MapperMetrics mapperMetrics ) { super(indexSettings); this.indexVersionCreated = indexSettings.getIndexVersionCreated(); @@ -218,6 +222,7 @@ public MapperService( this::getMetadataMappers, this::resolveDocumentType ); + this.mapperMetrics = mapperMetrics; } public boolean hasNested() { @@ -547,7 +552,7 @@ private synchronized DocumentMapper doMerge(String type, MergeReason reason, Map } private DocumentMapper newDocumentMapper(Mapping mapping, MergeReason reason, CompressedXContent mappingSource) { - DocumentMapper newMapper = new DocumentMapper(documentParser, mapping, mappingSource, indexVersionCreated); + DocumentMapper newMapper = new DocumentMapper(documentParser, mapping, mappingSource, indexVersionCreated, mapperMetrics); newMapper.validate(indexSettings, reason != MergeReason.MAPPING_RECOVERY); return newMapper; } @@ -780,4 +785,8 @@ public DynamicTemplate[] getAllDynamicTemplates() { public MapperRegistry getMapperRegistry() { return mapperRegistry; } + + public MapperMetrics getMapperMetrics() { + return mapperMetrics; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index bf879f30e5a29..42b6f9bfefd5a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -483,9 +483,9 @@ public boolean isSourceSynthetic() { /** * Build something to load source {@code _source}. */ - public SourceLoader newSourceLoader() { + public SourceLoader newSourceLoader(SourceFieldMetrics metrics) { SourceFieldMapper sfm = mapping.getMetadataMapperByClass(SourceFieldMapper.class); - return sfm == null ? SourceLoader.FROM_STORED_SOURCE : sfm.newSourceLoader(mapping); + return sfm == null ? 
SourceLoader.FROM_STORED_SOURCE : sfm.newSourceLoader(mapping, metrics); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 1b6d6dd1141f4..aef2464be1528 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -358,9 +358,9 @@ public FieldMapper.Builder getMergeBuilder() { /** * Build something to load source {@code _source}. */ - public SourceLoader newSourceLoader(Mapping mapping) { + public SourceLoader newSourceLoader(Mapping mapping, SourceFieldMetrics metrics) { if (mode == Mode.SYNTHETIC) { - return new SourceLoader.Synthetic(mapping); + return new SourceLoader.Synthetic(mapping, metrics); } return SourceLoader.FROM_STORED_SOURCE; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMetrics.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMetrics.java new file mode 100644 index 0000000000000..0e6ce79fd2170 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMetrics.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +import java.util.function.LongSupplier; + +/** + * Contains metrics for operations involving source field. 
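A minimal usage sketch built from the API added in this file, with MeterRegistry.NOOP standing in for the node's real telemetry provider:

    import org.elasticsearch.core.TimeValue;
    import org.elasticsearch.index.mapper.SourceFieldMetrics;
    import org.elasticsearch.telemetry.metric.MeterRegistry;

    class SourceMetricsUsageSketch {
        public static void main(String[] args) {
            SourceFieldMetrics metrics = new SourceFieldMetrics(MeterRegistry.NOOP, System::currentTimeMillis);

            long start = metrics.getRelativeTimeSupplier().getAsLong();
            // ... load stored fields and build the synthetic source here ...
            long elapsedMillis = metrics.getRelativeTimeSupplier().getAsLong() - start;
            metrics.recordSyntheticSourceLoadLatency(TimeValue.timeValueMillis(elapsedMillis));
        }
    }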
+ */ +public class SourceFieldMetrics { + public static final SourceFieldMetrics NOOP = new SourceFieldMetrics(MeterRegistry.NOOP, () -> 0); + + public static final String SYNTHETIC_SOURCE_LOAD_LATENCY = "es.mapper.synthetic_source.load.latency.histogram"; + + private final LongSupplier relativeTimeSupplier; + + private final LongHistogram syntheticSourceLoadLatency; + + public SourceFieldMetrics(MeterRegistry meterRegistry, LongSupplier relativeTimeSupplier) { + this.syntheticSourceLoadLatency = meterRegistry.registerLongHistogram( + SYNTHETIC_SOURCE_LOAD_LATENCY, + "Time it takes to load fields and construct synthetic source", + "ms" + ); + this.relativeTimeSupplier = relativeTimeSupplier; + } + + public LongSupplier getRelativeTimeSupplier() { + return relativeTimeSupplier; + } + + public void recordSyntheticSourceLoadLatency(TimeValue value) { + this.syntheticSourceLoadLatency.record(value.millis()); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java index dea3494f408d9..a1b95e7a2c8b0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.LeafReader; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.xcontent.XContentBuilder; @@ -84,14 +85,16 @@ public Set requiredStoredFields() { class Synthetic implements SourceLoader { private final Supplier syntheticFieldLoaderLeafSupplier; private final Set requiredStoredFields; + private final SourceFieldMetrics metrics; - public Synthetic(Mapping mapping) { + public Synthetic(Mapping mapping, SourceFieldMetrics metrics) { this.syntheticFieldLoaderLeafSupplier = mapping::syntheticFieldLoader; this.requiredStoredFields = syntheticFieldLoaderLeafSupplier.get() .storedFieldLoaders() .map(Map.Entry::getKey) .collect(Collectors.toSet()); this.requiredStoredFields.add(IgnoredSourceFieldMapper.NAME); + this.metrics = metrics; } @Override @@ -107,7 +110,22 @@ public Set requiredStoredFields() { @Override public Leaf leaf(LeafReader reader, int[] docIdsInLeaf) throws IOException { SyntheticFieldLoader loader = syntheticFieldLoaderLeafSupplier.get(); - return new SyntheticLeaf(loader, loader.docValuesLoader(reader, docIdsInLeaf)); + return new LeafWithMetrics(new SyntheticLeaf(loader, loader.docValuesLoader(reader, docIdsInLeaf)), metrics); + } + + private record LeafWithMetrics(Leaf leaf, SourceFieldMetrics metrics) implements Leaf { + + @Override + public Source source(LeafStoredFieldLoader storedFields, int docId) throws IOException { + long startTime = metrics.getRelativeTimeSupplier().getAsLong(); + + var source = leaf.source(storedFields, docId); + + TimeValue duration = TimeValue.timeValueMillis(metrics.getRelativeTimeSupplier().getAsLong() - startTime); + metrics.recordSyntheticSourceLoadLatency(duration); + + return source; + } } private static class SyntheticLeaf implements Leaf { diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 59453356f0389..7ca0b0bd401ea 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -38,6 +38,7 @@ import org.elasticsearch.index.mapper.MappedFieldType.FielddataOperation; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MappingParserContext; @@ -102,6 +103,7 @@ public class SearchExecutionContext extends QueryRewriteContext { private boolean rewriteToNamedQueries = false; private final Integer requestSize; + private final MapperMetrics mapperMetrics; /** * Build a {@linkplain SearchExecutionContext}. @@ -125,7 +127,8 @@ public SearchExecutionContext( Predicate indexNameMatcher, BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry, - Map runtimeMappings + Map runtimeMappings, + MapperMetrics mapperMetrics ) { this( shardId, @@ -147,7 +150,8 @@ public SearchExecutionContext( allowExpensiveQueries, valuesSourceRegistry, runtimeMappings, - null + null, + mapperMetrics ); } @@ -171,7 +175,8 @@ public SearchExecutionContext( BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry, Map runtimeMappings, - Integer requestSize + Integer requestSize, + MapperMetrics mapperMetrics ) { this( shardId, @@ -196,7 +201,8 @@ public SearchExecutionContext( allowExpensiveQueries, valuesSourceRegistry, parseRuntimeMappings(runtimeMappings, mapperService, indexSettings, mappingLookup), - requestSize + requestSize, + mapperMetrics ); } @@ -221,7 +227,8 @@ public SearchExecutionContext(SearchExecutionContext source) { source.allowExpensiveQueries, source.getValuesSourceRegistry(), source.runtimeMappings, - source.requestSize + source.requestSize, + source.mapperMetrics ); } @@ -245,7 +252,8 @@ private SearchExecutionContext( BooleanSupplier allowExpensiveQueries, ValuesSourceRegistry valuesSourceRegistry, Map runtimeMappings, - Integer requestSize + Integer requestSize, + MapperMetrics mapperMetrics ) { super( parserConfig, @@ -271,6 +279,7 @@ private SearchExecutionContext( this.nestedScope = new NestedScope(); this.searcher = searcher; this.requestSize = requestSize; + this.mapperMetrics = mapperMetrics; } private void reset() { @@ -427,9 +436,9 @@ public boolean isSourceSynthetic() { */ public SourceLoader newSourceLoader(boolean forceSyntheticSource) { if (forceSyntheticSource) { - return new SourceLoader.Synthetic(mappingLookup.getMapping()); + return new SourceLoader.Synthetic(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()); } - return mappingLookup.newSourceLoader(); + return mappingLookup.newSourceLoader(mapperMetrics.sourceFieldMetrics()); } /** @@ -482,7 +491,7 @@ public boolean containsBrokenAnalysis(String field) { public SearchLookup lookup() { if (this.lookup == null) { SourceProvider sourceProvider = isSourceSynthetic() - ? SourceProvider.fromSyntheticSource(mappingLookup.getMapping()) + ? 
SourceProvider.fromSyntheticSource(mappingLookup.getMapping(), mapperMetrics.sourceFieldMetrics()) : SourceProvider.fromStoredFields(); setLookupProviders(sourceProvider, LeafFieldLookupProvider.fromStoredFields()); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 0092fc61d986d..41efbae5603b7 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -103,6 +103,7 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; @@ -321,7 +322,8 @@ public IndexShard( final CircuitBreakerService circuitBreakerService, final IndexStorePlugin.SnapshotCommitSupplier snapshotCommitSupplier, final LongSupplier relativeTimeInNanosSupplier, - final Engine.IndexCommitListener indexCommitListener + final Engine.IndexCommitListener indexCommitListener, + final MapperMetrics mapperMetrics ) throws IOException { super(shardRouting.shardId(), indexSettings); assert shardRouting.initializing(); @@ -351,7 +353,7 @@ public IndexShard( CollectionUtils.appendToCopyNoNullElements(searchOperationListener, searchStats), logger ); - this.getService = new ShardGetService(indexSettings, this, mapperService); + this.getService = new ShardGetService(indexSettings, this, mapperService, mapperMetrics); this.shardWarmerService = new ShardIndexWarmerService(shardId, indexSettings); this.requestCacheStats = new ShardRequestCache(); this.shardFieldData = new ShardFieldData(); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 94582a0dd9862..c0483ee2c8208 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -101,6 +101,7 @@ import org.elasticsearch.index.get.GetStats; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; @@ -259,6 +260,7 @@ public class IndicesService extends AbstractLifecycleComponent private final ValuesSourceRegistry valuesSourceRegistry; private final TimestampFieldMapperService timestampFieldMapperService; private final CheckedBiConsumer requestCacheKeyDifferentiator; + private final MapperMetrics mapperMetrics; @Override protected void doStart() { @@ -327,6 +329,7 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon this.indexFoldersDeletionListeners = new CompositeIndexFoldersDeletionListener(builder.indexFoldersDeletionListeners); this.snapshotCommitSuppliers = builder.snapshotCommitSuppliers; this.requestCacheKeyDifferentiator = builder.requestCacheKeyDifferentiator; + this.mapperMetrics = builder.mapperMetrics; // doClose() is called when shutting down a node, yet there might still be ongoing requests // that we need to wait for before closing some resources such as the caches. 
In order to // avoid closing these resources while ongoing requests are still being processed, we use a @@ -747,7 +750,8 @@ private synchronized IndexService createIndexService( () -> allowExpensiveQueries, indexNameExpressionResolver, recoveryStateFactories, - loadSlowLogFieldProvider() + loadSlowLogFieldProvider(), + mapperMetrics ); for (IndexingOperationListener operationListener : indexingOperationListeners) { indexModule.addIndexOperationListener(operationListener); @@ -824,7 +828,8 @@ public synchronized MapperService createIndexMapperServiceForValidation(IndexMet () -> allowExpensiveQueries, indexNameExpressionResolver, recoveryStateFactories, - loadSlowLogFieldProvider() + loadSlowLogFieldProvider(), + mapperMetrics ); pluginsService.forEach(p -> p.onIndexModule(indexModule)); return indexModule.newIndexMapperService(clusterService, parserConfig, mapperRegistry, scriptService); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java index 6d9c2e06c15c8..d56cf3c2c1e1a 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.plugins.EnginePlugin; @@ -71,6 +72,7 @@ public class IndicesServiceBuilder { Map snapshotCommitSuppliers = Map.of(); @Nullable CheckedBiConsumer requestCacheKeyDifferentiator; + MapperMetrics mapperMetrics; public IndicesServiceBuilder settings(Settings settings) { this.settings = settings; @@ -169,6 +171,11 @@ public IndicesServiceBuilder requestCacheKeyDifferentiator( return this; } + public IndicesServiceBuilder mapperMetrics(MapperMetrics mapperMetrics) { + this.mapperMetrics = mapperMetrics; + return this; + } + public IndicesService build() { Objects.requireNonNull(settings); Objects.requireNonNull(pluginsService); @@ -192,6 +199,7 @@ public IndicesService build() { Objects.requireNonNull(recoveryStateFactories); Objects.requireNonNull(indexFoldersDeletionListeners); Objects.requireNonNull(snapshotCommitSuppliers); + Objects.requireNonNull(mapperMetrics); // collect engine factory providers from plugins engineFactoryProviders = pluginsService.filterPlugins(EnginePlugin.class) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 9585711b5562e..963b8c739f57c 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -111,6 +111,8 @@ import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.mapper.MapperMetrics; +import org.elasticsearch.index.mapper.SourceFieldMetrics; import org.elasticsearch.indices.ExecutorSelector; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.IndicesService; @@ -740,6 +742,12 @@ private void construct( ); } + SourceFieldMetrics sourceFieldMetrics = new SourceFieldMetrics( + 
telemetryProvider.getMeterRegistry(), + threadPool::relativeTimeInMillis + ); + MapperMetrics mapperMetrics = new MapperMetrics(sourceFieldMetrics); + IndicesService indicesService = new IndicesServiceBuilder().settings(settings) .pluginsService(pluginsService) .nodeEnvironment(nodeEnvironment) @@ -759,6 +767,7 @@ private void construct( .metaStateService(metaStateService) .valuesSourceRegistry(searchModule.getValuesSourceRegistry()) .requestCacheKeyDifferentiator(searchModule.getRequestCacheKeyDifferentiator()) + .mapperMetrics(mapperMetrics) .build(); final var parameters = new IndexSettingProvider.Parameters(indicesService::createIndexMapperServiceForValidation); @@ -907,7 +916,8 @@ record PluginServiceInstances( xContentRegistry, indicesModule.getMapperRegistry(), settingsModule.getIndexScopedSettings(), - scriptService + scriptService, + mapperMetrics ); if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMetadataUpgradeService(systemIndices, clusterService)); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 8ccc100e31501..dac5ab97f2962 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -984,6 +984,11 @@ class SnapshotsDeletion { // NB only counts stale root blobs today, not shard-level blobs private final AtomicLong bytesDeleted = new AtomicLong(); + /** + * Tracks the shard-level blobs which can be deleted once all the metadata updates have completed. + */ + private final ShardBlobsToDelete shardBlobsToDelete = new ShardBlobsToDelete(); + SnapshotsDeletion( Collection snapshotIds, long originalRepositoryDataGeneration, @@ -1001,36 +1006,6 @@ class SnapshotsDeletion { this.originalRepositoryData = originalRepositoryData; } - /** - * The result of removing a snapshot from a shard folder in the repository. - * - * @param indexId Index that the snapshot was removed from - * @param shardId Shard id that the snapshot was removed from - * @param newGeneration Id of the new index-${uuid} blob that does not include the snapshot any more - * @param blobsToDelete Blob names in the shard directory that have become unreferenced in the new shard generation - */ - private record ShardSnapshotMetaDeleteResult( - IndexId indexId, - int shardId, - ShardGeneration newGeneration, - Collection blobsToDelete - ) {} - - /** - *
<p>
- * Shard-level results, see {@link ShardSnapshotMetaDeleteResult}.
- * </p>
- * <p>
- * Writes to this list are all synchronized (via {@link #addShardDeleteResult}), and happen-before it is read so the reads need
- * no further synchronization
- * </p>
- */ - private final List shardDeleteResults = new ArrayList<>(); - - private synchronized void addShardDeleteResult(ShardSnapshotMetaDeleteResult shardDeleteResult) { - shardDeleteResults.add(shardDeleteResult); - } - // --------------------------------------------------------------------------------------------------------------------------------- // The overall flow of execution @@ -1058,11 +1033,10 @@ private void runWithUniqueShardMetadataNaming(SnapshotDeleteListener listener) { // referenced by the new RepositoryData and will be cleaned up by a subsequent delete. // // TODO should we even write the new RepositoryData unless all shard paths have been successfully updated? See #100569. - final ShardGenerations.Builder builder = ShardGenerations.builder(); - for (ShardSnapshotMetaDeleteResult newGen : shardDeleteResults) { - builder.put(newGen.indexId, newGen.shardId, newGen.newGeneration); - } - updateRepositoryData(originalRepositoryData.removeSnapshots(snapshotIds, builder.build()), l); + updateRepositoryData( + originalRepositoryData.removeSnapshots(snapshotIds, shardBlobsToDelete.getUpdatedShardGenerations()), + l + ); }) .addListener( @@ -1073,7 +1047,7 @@ private void runWithUniqueShardMetadataNaming(SnapshotDeleteListener listener) { // Run unreferenced blobs cleanup in parallel to shard-level snapshot deletion try (var refs = new RefCountingRunnable(listener::onDone)) { cleanupUnlinkedRootAndIndicesBlobs(newRepositoryData, refs.acquireListener()); - cleanupUnlinkedShardLevelBlobs(shardDeleteResults, refs.acquireListener()); + cleanupUnlinkedShardLevelBlobs(refs.acquireListener()); } }, listener::onFailure @@ -1098,7 +1072,7 @@ private void runWithLegacyNumericShardMetadataNaming(SnapshotDeleteListener list ActionRunnable.wrap( refs.acquireListener(), l0 -> writeUpdatedShardMetadataAndComputeDeletes( - l0.delegateFailure((l, ignored) -> cleanupUnlinkedShardLevelBlobs(shardDeleteResults, l)) + l0.delegateFailure((l, ignored) -> cleanupUnlinkedShardLevelBlobs(l)) ) ) ); @@ -1264,9 +1238,7 @@ protected void doRun() throws Exception { newGen = tuple.v2() + 1; blobStoreIndexShardSnapshots = tuple.v1(); } - addShardDeleteResult( - deleteFromShardSnapshotMeta(blobStoreIndexShardSnapshots.withRetainedSnapshots(survivingSnapshots), newGen) - ); + deleteFromShardSnapshotMeta(blobStoreIndexShardSnapshots.withRetainedSnapshots(survivingSnapshots), newGen); } /** @@ -1275,14 +1247,11 @@ protected void doRun() throws Exception { * @param indexGeneration generation to write the new shard level level metadata to. 
If negative a uuid id shard generation * should be used */ - private ShardSnapshotMetaDeleteResult deleteFromShardSnapshotMeta( - BlobStoreIndexShardSnapshots updatedSnapshots, - long indexGeneration - ) { + private void deleteFromShardSnapshotMeta(BlobStoreIndexShardSnapshots updatedSnapshots, long indexGeneration) { ShardGeneration writtenGeneration = null; try { if (updatedSnapshots.snapshots().isEmpty()) { - return new ShardSnapshotMetaDeleteResult( + shardBlobsToDelete.addShardDeleteResult( indexId, shardId, ShardGenerations.DELETED_SHARD_GEN, @@ -1304,7 +1273,7 @@ private ShardSnapshotMetaDeleteResult deleteFromShardSnapshotMeta( final Set survivingSnapshotUUIDs = survivingSnapshots.stream() .map(SnapshotId::getUUID) .collect(Collectors.toSet()); - return new ShardSnapshotMetaDeleteResult( + shardBlobsToDelete.addShardDeleteResult( indexId, shardId, writtenGeneration, @@ -1372,11 +1341,8 @@ private void updateRepositoryData(RepositoryData newRepositoryData, ActionListen // --------------------------------------------------------------------------------------------------------------------------------- // Cleaning up dangling blobs - private void cleanupUnlinkedShardLevelBlobs( - Collection shardDeleteResults, - ActionListener listener - ) { - final Iterator filesToDelete = resolveFilesToDelete(shardDeleteResults); + private void cleanupUnlinkedShardLevelBlobs(ActionListener listener) { + final Iterator filesToDelete = resolveFilesToDelete(); if (filesToDelete.hasNext() == false) { listener.onResponse(null); return; @@ -1392,26 +1358,25 @@ private void cleanupUnlinkedShardLevelBlobs( })); } - private Iterator resolveFilesToDelete(Collection deleteResults) { + private Iterator resolveFilesToDelete() { // Somewhat surprisingly we can construct the String representations of the blobs to delete with BlobPath#buildAsString even // on Windows, because the JDK translates / to \ automatically (and all other blob stores use / as the path separator anyway) final String basePath = basePath().buildAsString(); final int basePathLen = basePath.length(); - return Stream.concat( - // Unreferenced shard-level blobs - deleteResults.stream().flatMap(shardResult -> { - final String shardPath = shardPath(shardResult.indexId, shardResult.shardId).buildAsString(); - return shardResult.blobsToDelete.stream().map(blob -> shardPath + blob); - }), - // Unreferenced index metadata - originalRepositoryData.indexMetaDataToRemoveAfterRemovingSnapshots(snapshotIds).entrySet().stream().flatMap(entry -> { - final String indexContainerPath = indexPath(entry.getKey()).buildAsString(); - return entry.getValue().stream().map(id -> indexContainerPath + INDEX_METADATA_FORMAT.blobName(id)); - }) - ).map(absolutePath -> { + return Iterators.map(Iterators.concat(shardBlobsToDelete.getBlobPaths(), getUnreferencedIndexMetadata()), absolutePath -> { assert absolutePath.startsWith(basePath); return absolutePath.substring(basePathLen); - }).iterator(); + }); + } + + private Iterator getUnreferencedIndexMetadata() { + return Iterators.flatMap( + originalRepositoryData.indexMetaDataToRemoveAfterRemovingSnapshots(snapshotIds).entrySet().iterator(), + entry -> { + final String indexContainerPath = indexPath(entry.getKey()).buildAsString(); + return Iterators.map(entry.getValue().iterator(), id -> indexContainerPath + INDEX_METADATA_FORMAT.blobName(id)); + } + ); } /** @@ -1545,6 +1510,62 @@ private void logStaleRootLevelBlobs( } } + /** + * Tracks the shard-level blobs which can be deleted once all the metadata updates have completed 
during a snapshot deletion. + */ + class ShardBlobsToDelete { + + /** + * The result of removing a snapshot from a shard folder in the repository. + * + * @param indexId Index that the snapshot was removed from + * @param shardId Shard id that the snapshot was removed from + * @param newGeneration Id of the new index-${uuid} blob that does not include the snapshot any more + * @param blobsToDelete Blob names in the shard directory that have become unreferenced in the new shard generation + */ + private record ShardSnapshotMetaDeleteResult( + IndexId indexId, + int shardId, + ShardGeneration newGeneration, + Collection blobsToDelete + ) {} + + /** + *
<p>
+ * Shard-level results, see {@link ShardSnapshotMetaDeleteResult}.
+ * </p>
+ * <p>
+ * Writes to this list are all synchronized (via {@link #addShardDeleteResult}), and happen-before it is read so the reads need
+ * no further synchronization
+ * </p>
+ */ + private final List shardDeleteResults = new ArrayList<>(); + + synchronized void addShardDeleteResult( + IndexId indexId, + int shardId, + ShardGeneration newGeneration, + Collection blobsToDelete + ) { + shardDeleteResults.add(new ShardSnapshotMetaDeleteResult(indexId, shardId, newGeneration, blobsToDelete)); + } + + public ShardGenerations getUpdatedShardGenerations() { + final var builder = ShardGenerations.builder(); + for (var shardResult : shardDeleteResults) { + builder.put(shardResult.indexId, shardResult.shardId, shardResult.newGeneration); + } + return builder.build(); + } + + public Iterator getBlobPaths() { + return Iterators.flatMap(shardDeleteResults.iterator(), shardResult -> { + final var shardPath = shardPath(shardResult.indexId, shardResult.shardId).buildAsString(); + return Iterators.map(shardResult.blobsToDelete.iterator(), blob -> shardPath + blob); + }); + } + } + @Override public void finalizeSnapshot(final FinalizeSnapshotContext finalizeSnapshotContext) { final long repositoryStateId = finalizeSnapshotContext.repositoryStateId(); diff --git a/server/src/main/java/org/elasticsearch/search/lookup/EmptySource.java b/server/src/main/java/org/elasticsearch/search/lookup/EmptySource.java new file mode 100644 index 0000000000000..bfaf9620ade74 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/lookup/EmptySource.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.lookup; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.EnumMap; +import java.util.Map; + +final class EmptySource implements Source { + + private static final EnumMap values = new EnumMap<>(XContentType.class); + + static { + for (XContentType value : XContentType.values()) { + values.put(value, new EmptySource(value)); + } + } + + static EmptySource forType(XContentType type) { + return values.get(type); + } + + private final XContentType type; + + private final BytesReference sourceRef; + + private EmptySource(XContentType type) { + this.type = type; + try { + sourceRef = new BytesArray( + BytesReference.toBytes(BytesReference.bytes(new XContentBuilder(type.xContent(), new BytesStreamOutput()).value(Map.of()))) + ); + } catch (IOException e) { + throw new AssertionError("impossible", e); + } + } + + @Override + public XContentType sourceContentType() { + return type; + } + + @Override + public Map source() { + return Map.of(); + } + + @Override + public BytesReference internalSourceRef() { + return sourceRef; + } + + @Override + public Source filter(SourceFilter sourceFilter) { + return this; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/lookup/Source.java b/server/src/main/java/org/elasticsearch/search/lookup/Source.java index 851044d1efcec..7098cce548c53 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/Source.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/Source.java @@ -74,7 +74,7 @@ default Object 
extractValue(String path, @Nullable Object nullValue) { * An empty Source, represented as an empty map */ static Source empty(XContentType xContentType) { - return Source.fromMap(Map.of(), xContentType == null ? XContentType.JSON : xContentType); + return EmptySource.forType(xContentType == null ? XContentType.JSON : xContentType); } /** @@ -148,6 +148,9 @@ public Source filter(SourceFilter sourceFilter) { */ static Source fromMap(Map map, XContentType xContentType) { Map sourceMap = map == null ? Map.of() : map; + if (sourceMap.isEmpty()) { + return empty(xContentType); + } return new Source() { @Override public XContentType sourceContentType() { diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java index 27d48613820cd..8a180d4f11ec7 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceProvider.java @@ -11,6 +11,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.SourceFieldMetrics; +import org.elasticsearch.index.mapper.SourceLoader; import java.io.IOException; @@ -45,7 +47,7 @@ static SourceProvider fromStoredFields() { * but it is not safe to use this to access documents from the same segment across * multiple threads. */ - static SourceProvider fromSyntheticSource(Mapping mapping) { - return new SyntheticSourceProvider(mapping); + static SourceProvider fromSyntheticSource(Mapping mapping, SourceFieldMetrics metrics) { + return new SyntheticSourceProvider(new SourceLoader.Synthetic(mapping, metrics)); } } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SyntheticSourceProvider.java b/server/src/main/java/org/elasticsearch/search/lookup/SyntheticSourceProvider.java index 74327e16d20ea..bccfc22dc7e95 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SyntheticSourceProvider.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SyntheticSourceProvider.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; -import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.SourceLoader; import java.io.IOException; @@ -25,8 +24,8 @@ class SyntheticSourceProvider implements SourceProvider { private final SourceLoader sourceLoader; private volatile SyntheticSourceLeafLoader[] leafLoaders; - SyntheticSourceProvider(Mapping mapping) { - sourceLoader = new SourceLoader.Synthetic(mapping); + SyntheticSourceProvider(SourceLoader sourceLoader) { + this.sourceLoader = sourceLoader; } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 87fe732d156c5..5a5d5f46de413 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -145,7 +145,7 @@ protected DataStream mutateInstance(DataStream instance) { if (randomBoolean() || autoShardingEvent == null) { // If we're mutating the auto sharding event of the failure store, we need to ensure there's at least one failure index. 
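// Editorial note: DataStreamTestHelper.randomIndexInstances() may produce an empty list, which
// would leave the failure store without an index to carry the auto-sharding event; the
// randomNonEmptyIndexInstances() variant used below guarantees at least one element.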
if (failureIndices.isEmpty()) { - failureIndices = DataStreamTestHelper.randomIndexInstances(); + failureIndices = DataStreamTestHelper.randomNonEmptyIndexInstances(); failureStore = true; } autoShardingEvent = new DataStreamAutoShardingEvent( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java index 388cbc83b7c6f..99f78f95dd36c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; @@ -140,7 +141,8 @@ private IndexMetadataVerifier getIndexMetadataVerifier() { xContentRegistry(), new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - null + null, + MapperMetrics.NOOP ); } diff --git a/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java new file mode 100644 index 0000000000000..bfb03ea9496e5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java @@ -0,0 +1,427 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matcher; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalQueries; +import java.time.temporal.ValueRange; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.YEAR; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class Iso8601ParserTests extends ESTestCase { + + private static Iso8601Parser defaultParser() { + return new Iso8601Parser(Set.of(), true, Map.of()); + } + + private static Matcher hasResult(DateTime dateTime) { + return transformedMatch(Iso8601Parser.Result::result, equalTo(dateTime)); + } + + private static Matcher hasError(int parseError) { + return transformedMatch(Iso8601Parser.Result::errorIndex, equalTo(parseError)); + } + + public void testStrangeParses() { + assertThat(defaultParser().tryParse("-9999-01-01", null), hasResult(new DateTime(-9999, 1, 1, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("1000", null), hasResult(new DateTime(1000, null, null, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("2023-02-02T", null), hasResult(new DateTime(2023, 2, 2, null, null, null, null, null, null))); + + // these are accepted by the previous formatters, but are not valid ISO8601 + assertThat(defaultParser().tryParse("2023-01-01T12:00:00.01,02", null), hasError(22)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris+0400", null), hasError(19)); + } + + public void testOutOfRange() { + assertThat(defaultParser().tryParse("2023-13-12", null), hasError(5)); + assertThat(defaultParser().tryParse("2023-12-32", null), hasError(8)); + assertThat(defaultParser().tryParse("2023-12-31T24", null), hasError(11)); + assertThat(defaultParser().tryParse("2023-12-31T23:60", null), hasError(14)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:60", null), hasError(17)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:59+18:30", null), hasError(19)); + } + + public void testMandatoryFields() { + assertThat( + new Iso8601Parser(Set.of(YEAR), true, Map.of()).tryParse("2023", null), + hasResult(new DateTime(2023, null, null, null, null, null, null, null, null)) + ); + assertThat(new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023", null), hasError(4)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023-06", null), + hasResult(new DateTime(2023, 6, null, null, null, null, null, null, null)) + ); + assertThat(new 
Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06", null), hasError(7)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06-20", null), + hasResult(new DateTime(2023, 6, 20, null, null, null, null, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20", null), + hasError(10) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20T15", null), + hasResult(new DateTime(2023, 6, 20, 15, 0, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15", + null + ), + hasError(13) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15Z", + null + ), + hasError(13) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15:48", + null + ), + hasResult(new DateTime(2023, 6, 20, 15, 48, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48", null), + hasError(16) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48Z", null), + hasError(16) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48:09", null), + hasResult(new DateTime(2023, 6, 20, 15, 48, 9, 0, null, null)) + ); + } + + public void testParseNanos() { + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.05", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.0005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,00005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,0000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.00000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,000000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5, null, null)) + ); + + // too many nanos + 
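// Editorial note: NANO_OF_SECOND can hold at most nine fractional digits, so the tenth digit
// in the value below (string index 29) is where the parser reports the error.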
assertThat(defaultParser().tryParse("2023-01-01T12:00:00.0000000005", null), hasError(29)); + } + + private static Matcher hasTimezone(ZoneId offset) { + return transformedMatch(r -> r.result().query(TemporalQueries.zone()), equalTo(offset)); + } + + public void testParseTimezones() { + // using defaults + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", null), hasTimezone(null)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.UTC), hasTimezone(ZoneOffset.UTC)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.ofHours(-3)), hasTimezone(ZoneOffset.ofHours(-3))); + + // timezone specified + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Z", null), hasTimezone(ZoneOffset.UTC)); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-05", null), hasTimezone(ZoneOffset.ofHours(-5))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+11", null), hasTimezone(ZoneOffset.ofHours(11))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0830", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-0415", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+08:30", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-04:15", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+011030", null), hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30))); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-074520", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00+01:10:30", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-07:45:20", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT", null), hasTimezone(ZoneId.of("GMT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC", null), hasTimezone(ZoneId.of("UTC"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT", null), hasTimezone(ZoneId.of("UT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT+3", null), hasTimezone(ZoneId.of("GMT+3"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC-4", null), hasTimezone(ZoneId.of("UTC-4"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT+6", null), hasTimezone(ZoneId.of("UT+6"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris", null), hasTimezone(ZoneId.of("Europe/Paris"))); + + // we could be more specific in the error index for invalid timezones, + // but that would require keeping track & propagating Result objects within date-time parsing just for the ZoneId + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+04:0030", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0400:30", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Invalid", null), hasError(19)); + } + + private static void assertEquivalent(String text, DateTimeFormatter formatter) { + TemporalAccessor expected = formatter.parse(text); + TemporalAccessor actual = defaultParser().tryParse(text, null).result(); + assertThat(actual, is(notNullValue())); + + 
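// Editorial note: the two parses yield different TemporalAccessor implementations, so equals()
// would always fail; the helper instead compares what each accessor resolves to. Illustrative
// sketch only, not part of this change:
//   TemporalAccessor a = DateTimeFormatter.ISO_DATE_TIME.parse("2023-01-01T12:00:00Z");
//   a.query(TemporalQueries.localDate());  // LocalDate 2023-01-01
//   a.query(TemporalQueries.localTime());  // LocalTime 12:00
//   a.query(TemporalQueries.zone());       // ZoneOffset Z (UTC)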
assertThat(actual.query(TemporalQueries.localDate()), equalTo(expected.query(TemporalQueries.localDate()))); + assertThat(actual.query(TemporalQueries.localTime()), equalTo(expected.query(TemporalQueries.localTime()))); + assertThat(actual.query(TemporalQueries.zone()), equalTo(expected.query(TemporalQueries.zone()))); + } + + private static void assertEquivalentFailure(String text, DateTimeFormatter formatter) { + DateTimeParseException expected = expectThrows(DateTimeParseException.class, () -> formatter.parse(text)); + int error = defaultParser().tryParse(text, null).errorIndex(); + assertThat(error, greaterThanOrEqualTo(0)); + + assertThat(error, equalTo(expected.getErrorIndex())); + } + + public void testEquivalence() { + // test that Iso8601Parser produces the same output as DateTimeFormatter + DateTimeFormatter mandatoryFormatter = new DateTimeFormatterBuilder().append(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + // just checking timezones/ids here + assertEquivalent("2023-01-01T12:00:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Z", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+0500", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-0715", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07:15", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT-09:45:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Zulu", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Europe/Paris", mandatoryFormatter); + + assertEquivalentFailure("2023-01-01T12:00:00+5", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00-7", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00InvalidTimeZone", mandatoryFormatter); + + DateTimeFormatter allFieldsOptional = new DateTimeFormatterBuilder().appendValue(YEAR, 4, 4, SignStyle.EXCEEDS_PAD) + .optionalStart() + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .optionalStart() + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .optionalStart() + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + assertEquivalent("2023", allFieldsOptional); + assertEquivalent("2023-04", allFieldsOptional); + assertEquivalent("2023-04-08", allFieldsOptional); + 
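// each successively longer prefix exercises one more optional component of the pattern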
assertEquivalent("2023-04-08T13", allFieldsOptional); + assertEquivalent("2023-04-08T13:45", allFieldsOptional); + assertEquivalent("2023-04-08T13:45:50", allFieldsOptional); + assertEquivalent("-2023-04-08T13:45:50", allFieldsOptional); + } + + private static int randomValue(ValueRange range) { + assert range.isIntValue(); + return randomIntBetween((int) range.getMinimum(), (int) range.getMaximum()); + } + + public void testDefaults() { + Map defaults = Map.of( + MONTH_OF_YEAR, + randomValue(MONTH_OF_YEAR.range()), + DAY_OF_MONTH, + randomValue(DAY_OF_MONTH.range()), + HOUR_OF_DAY, + randomValue(HOUR_OF_DAY.range()), + MINUTE_OF_HOUR, + randomValue(MINUTE_OF_HOUR.range()), + SECOND_OF_MINUTE, + randomValue(SECOND_OF_MINUTE.range()), + NANO_OF_SECOND, + randomValue(NANO_OF_SECOND.range()) + ); + + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023", null), + hasResult( + new DateTime( + 2023, + defaults.get(MONTH_OF_YEAR), + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01", null), + hasResult( + new DateTime( + 2023, + 1, + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01", null), + hasResult( + new DateTime( + 2023, + 1, + 1, + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00", null), + hasResult( + new DateTime( + 2023, + 1, + 1, + 0, + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, defaults.get(SECOND_OF_MINUTE), defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00.0", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, 0, null, null)) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 22869ad37524c..4e12627a158da 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.test.ESTestCase; @@ -193,7 +194,7 @@ private static class MockIndexMetadataVerifier extends IndexMetadataVerifier { private final boolean upgrade; MockIndexMetadataVerifier(boolean upgrade) { - 
super(Settings.EMPTY, null, null, null, null, null); + super(Settings.EMPTY, null, null, null, null, null, MapperMetrics.NOOP); this.upgrade = upgrade; } diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 977ab9bcedd75..c3c94c2730366 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -60,6 +60,7 @@ import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; @@ -235,7 +236,8 @@ public void testWrapperIsBound() throws IOException { () -> true, indexNameExpressionResolver, Collections.emptyMap(), - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); module.setReaderWrapper(s -> new Wrapper()); @@ -261,7 +263,8 @@ public void testRegisterIndexStore() throws IOException { () -> true, indexNameExpressionResolver, Collections.emptyMap(), - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); final IndexService indexService = newIndexService(module); @@ -285,7 +288,8 @@ public void testDirectoryWrapper() throws IOException { () -> true, indexNameExpressionResolver, Collections.emptyMap(), - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); module.setDirectoryWrapper(new TestDirectoryWrapper()); @@ -637,7 +641,8 @@ public void testRegisterCustomRecoveryStateFactory() throws IOException { () -> true, indexNameExpressionResolver, recoveryStateFactories, - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); final IndexService indexService = newIndexService(module); @@ -658,7 +663,8 @@ public void testIndexCommitListenerIsBound() throws IOException, ExecutionExcept () -> true, indexNameExpressionResolver, Collections.emptyMap(), - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); final AtomicLong lastAcquiredPrimaryTerm = new AtomicLong(); @@ -759,7 +765,8 @@ private static IndexModule createIndexModule( () -> true, indexNameExpressionResolver, Collections.emptyMap(), - mock(SlowLogFieldProvider.class) + mock(SlowLogFieldProvider.class), + MapperMetrics.NOOP ); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index cce5e4c057a97..ffb3cc1943bff 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; @@ -115,7 +116,8 @@ private CodecService createCodecService() throws IOException { mapperRegistry, () -> null, settings.getMode().idFieldMapperWithoutFieldData(), - ScriptCompiler.NONE + ScriptCompiler.NONE, + 
MapperMetrics.NOOP ); return new CodecService(service, BigArrays.NON_RECYCLING_INSTANCE); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index 726ec8561535e..c06fe5d8a89d2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -222,7 +222,8 @@ public void testRangeQuery() throws IOException { null, () -> true, null, - Collections.emptyMap() + Collections.emptyMap(), + MapperMetrics.NOOP ); MappedFieldType ft = new DateFieldType("field"); String date1 = "2015-10-12T14:10:55"; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java index 06e70e84bbb67..48de1dbe88dbd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java @@ -97,7 +97,7 @@ public void testSyntheticSourceMany() throws IOException { iw.addDocument(mapper.documentMapper().parse(source(b -> b.field("doc", doc).field(CONTENT_TYPE, c))).rootDoc()); } }, reader -> { - SourceLoader loader = mapper.mappingLookup().newSourceLoader(); + SourceLoader loader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); assertThat(loader.requiredStoredFields(), Matchers.contains("_ignored_source")); for (LeafReaderContext leaf : reader.leaves()) { int[] docIds = IntStream.range(0, leaf.reader().maxDoc()).toArray(); @@ -129,7 +129,7 @@ public void testSyntheticSourceManyDoNotHave() throws IOException { })).rootDoc()); } }, reader -> { - SourceLoader loader = mapper.mappingLookup().newSourceLoader(); + SourceLoader loader = mapper.mappingLookup().newSourceLoader(SourceFieldMetrics.NOOP); assertThat(loader.requiredStoredFields(), Matchers.contains("_ignored_source")); for (LeafReaderContext leaf : reader.leaves()) { int[] docIds = IntStream.range(0, leaf.reader().maxDoc()).toArray(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java index c210fb0654683..633ffbf1c3a3a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperTests.java @@ -69,7 +69,13 @@ public void testAddFields() throws Exception { assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue()); // but merged should DocumentParser documentParser = new DocumentParser(null, null); - DocumentMapper mergedMapper = new DocumentMapper(documentParser, merged, merged.toCompressedXContent(), IndexVersion.current()); + DocumentMapper mergedMapper = new DocumentMapper( + documentParser, + merged, + merged.toCompressedXContent(), + IndexVersion.current(), + MapperMetrics.NOOP + ); assertThat(mergedMapper.mappers().getMapper("age"), notNullValue()); assertThat(mergedMapper.mappers().getMapper("obj1.prop1"), notNullValue()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index d3dd585788867..d417d6c647d05 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -2623,7 +2623,8 @@ same name need to be part of the same mappings (hence the same document). If th mapperService.documentParser(), newMapping, newMapping.toCompressedXContent(), - IndexVersion.current() + IndexVersion.current(), + MapperMetrics.NOOP ); ParsedDocument doc2 = newDocMapper.parse(source(""" { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java index 369b926110eaa..5a11f7a3c0765 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IndexFieldTypeTests.java @@ -83,7 +83,8 @@ private SearchExecutionContext createContext() { indexNameMatcher, () -> true, null, - Collections.emptyMap() + Collections.emptyMap(), + MapperMetrics.NOOP ); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java index aa5a772ecc445..65228c079e598 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -394,7 +394,7 @@ protected Source getSourceFor(CheckedConsumer mapp iw.addDocument(doc); iw.close(); try (DirectoryReader reader = DirectoryReader.open(directory)) { - SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping()); + SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP); Source syntheticSource = provider.getSource(getOnlyLeafReader(reader).getContext(), 0); return syntheticSource; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTelemetryTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTelemetryTests.java new file mode 100644 index 0000000000000..1c88cbb0d8592 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTelemetryTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.search.lookup.SourceProvider; +import org.elasticsearch.telemetry.TestTelemetryPlugin; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +public class SourceLoaderTelemetryTests extends MapperServiceTestCase { + private final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); + + @Override + protected Collection getPlugins() { + return List.of(telemetryPlugin); + } + + @Override + public void testFieldHasValue() {} + + @Override + public void testFieldHasValueWithEmptyFieldInfos() {} + + public void testSyntheticSourceTelemetry() throws IOException { + var mapping = syntheticSourceMapping(b -> { b.startObject("kwd").field("type", "keyword").endObject(); }); + var mapper = createDocumentMapper(mapping); + + try (Directory directory = newDirectory()) { + RandomIndexWriter iw = new RandomIndexWriter(random(), directory); + LuceneDocument doc = mapper.parse(source(b -> b.field("kwd", "foo"))).rootDoc(); + iw.addDocument(doc); + iw.close(); + try (DirectoryReader reader = DirectoryReader.open(directory)) { + SourceProvider provider = SourceProvider.fromSyntheticSource( + mapper.mapping(), + createTestMapperMetrics().sourceFieldMetrics() + ); + Source synthetic = provider.getSource(getOnlyLeafReader(reader).getContext(), 0); + assertEquals(synthetic.source().get("kwd"), "foo"); + } + } + + var measurements = telemetryPlugin.getLongHistogramMeasurement(SourceFieldMetrics.SYNTHETIC_SOURCE_LOAD_LATENCY); + assertEquals(1, measurements.size()); + // test implementation of time provider always has a gap of 1 between values + assertEquals(measurements.get(0).getLong(), 1); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTests.java index aa30efb7dbc51..848f8878ffb98 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceLoaderTests.java @@ -20,7 +20,7 @@ public void testNonSynthetic() throws IOException { b.startObject("o").field("type", "object").endObject(); b.startObject("kwd").field("type", "keyword").endObject(); })); - assertFalse(mapper.mappers().newSourceLoader().reordersFieldValues()); + assertFalse(mapper.mappers().newSourceLoader(SourceFieldMetrics.NOOP).reordersFieldValues()); } public void testEmptyObject() throws IOException { @@ -28,7 +28,7 @@ public void testEmptyObject() throws IOException { b.startObject("o").field("type", "object").endObject(); b.startObject("kwd").field("type", "keyword").endObject(); })); - assertTrue(mapper.mappers().newSourceLoader().reordersFieldValues()); + assertTrue(mapper.mappers().newSourceLoader(SourceFieldMetrics.NOOP).reordersFieldValues()); assertThat(syntheticSource(mapper, b -> b.field("kwd", "foo")), equalTo(""" {"kwd":"foo"}""")); } diff --git a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index 2d89eb76cb332..5f62c655e371d 100644 --- a/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MappedFieldType.Relation; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentFactory; @@ -47,7 +48,8 @@ public void testRewriteMissingField() throws Exception { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); @@ -87,7 +89,8 @@ public void testRewriteMissingReader() throws Exception { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must return INTERSECT @@ -129,7 +132,8 @@ public void testRewriteEmptyReader() throws Exception { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index e541c680ada1b..8d260ca7f8556 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.mapper.LongScriptFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; @@ -524,7 +525,8 @@ private static SearchExecutionContext createSearchExecutionContext( null, () -> true, null, - runtimeMappings + runtimeMappings, + MapperMetrics.NOOP ); } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index ca7dd2683f211..60d73f873bbd4 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -88,6 +88,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.ShardLongFieldRange; @@ -245,7 +246,8 @@ public Transport.Connection getConnection(DiscoveryNode node) { xContentRegistry, null, null, - null + null, + MapperMetrics.NOOP ) { // metadata upgrader should do nothing @Override diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 5a736b4e1e9dd..bfbd92bd5df22 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java 
+++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -20,7 +21,6 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; -import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobPath; @@ -29,8 +29,8 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; @@ -41,11 +41,15 @@ import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.junit.AfterClass; +import org.junit.BeforeClass; import java.util.Arrays; import java.util.Collection; @@ -58,21 +62,31 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class RepositoriesServiceTests extends ESTestCase { + private static ThreadPool threadPool; + + private ClusterService clusterService; private RepositoriesService repositoriesService; + @BeforeClass + public static void createThreadPool() { + threadPool = new TestThreadPool(RepositoriesService.class.getName()); + } + + @AfterClass + public static void terminateThreadPool() { + if (threadPool != null) { + threadPool.shutdownNow(); + threadPool = null; + } + } + @Override public void setUp() throws Exception { super.setUp(); - ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - ThreadPool threadPool = mock(ThreadPool.class); - when(threadPool.getThreadContext()).thenReturn(threadContext); - when(threadPool.info(ThreadPool.Names.SNAPSHOT)).thenReturn( - new ThreadPool.Info(ThreadPool.Names.SNAPSHOT, ThreadPool.ThreadPoolType.FIXED, randomIntBetween(1, 10)) - ); + final TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), @@ -82,10 +96,18 @@ public void setUp() throws Exception { null, Collections.emptySet() ); - final ClusterApplierService clusterApplierService = mock(ClusterApplierService.class); - when(clusterApplierService.threadPool()).thenReturn(threadPool); - final ClusterService clusterService = mock(ClusterService.class); - 
when(clusterService.getClusterApplierService()).thenReturn(clusterApplierService); + + clusterService = ClusterServiceUtils.createClusterService(threadPool); + + // cluster utils publisher does not call AckListener, making some method calls hang indefinitely + // in this test we have a single master node, and it acknowledges cluster state immediately + final var publisher = ClusterServiceUtils.createClusterStatePublisher(clusterService.getClusterApplierService()); + clusterService.getMasterService().setClusterStatePublisher((evt, pub, ack) -> { + publisher.publish(evt, pub, ack); + ack.onCommit(TimeValue.ZERO); + ack.onNodeAck(clusterService.localNode(), null); + }); + Map typesRegistry = Map.of( TestRepository.TYPE, TestRepository::new, @@ -98,16 +120,25 @@ public void setUp() throws Exception { ); repositoriesService = new RepositoriesService( Settings.EMPTY, - mock(ClusterService.class), + clusterService, transportService, typesRegistry, typesRegistry, threadPool, List.of() ); + + clusterService.start(); repositoriesService.start(); } + @Override + public void tearDown() throws Exception { + super.tearDown(); + clusterService.stop(); + repositoriesService.stop(); + } + public void testRegisterInternalRepository() { String repoName = "name"; expectThrows(RepositoryMissingException.class, () -> repositoriesService.repository(repoName)); @@ -283,18 +314,11 @@ public void testRegisterRepositorySuccessAfterCreationFailed() { // 2. repository creation successfully when current node become master node and repository is put again var request = new PutRepositoryRequest().name(repoName).type(TestRepository.TYPE); - repositoriesService.registerRepository(request, new ActionListener<>() { - @Override - public void onResponse(AcknowledgedResponse acknowledgedResponse) { - assertTrue(acknowledgedResponse.isAcknowledged()); - assertThat(repositoriesService.repository(repoName), isA(TestRepository.class)); - } - - @Override - public void onFailure(Exception e) { - assert false : e; - } - }); + var resultListener = new SubscribableListener(); + repositoriesService.registerRepository(request, resultListener); + var response = safeAwait(resultListener); + assertTrue(response.isAcknowledged()); + assertThat(repositoriesService.repository(repoName), isA(TestRepository.class)); } private ClusterState createClusterStateWithRepo(String repoName, String repoType) { @@ -320,11 +344,10 @@ private void assertThrowsOnRegister(String repoName) { private static class TestRepository implements Repository { private static final String TYPE = "internal"; + private final RepositoryMetadata metadata; private boolean isClosed; private boolean isStarted; - private final RepositoryMetadata metadata; - private TestRepository(RepositoryMetadata metadata) { this.metadata = metadata; } @@ -357,7 +380,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna @Override public void getRepositoryData(Executor responseExecutor, ActionListener listener) { - listener.onResponse(null); + listener.onResponse(RepositoryData.EMPTY); } @Override diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index adfc333e9dc7e..e18e327734495 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -13,6 +13,7 @@ import 
org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.RefCountingRunnable; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -47,6 +48,7 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -56,6 +58,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; @@ -71,6 +74,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -494,4 +498,41 @@ private Environment createEnvironment() { .build() ); } + + public void testShardBlobsToDelete() { + final var repo = setupRepo(); + final var shardBlobsToDelete = repo.new ShardBlobsToDelete(); + final var expectedShardGenerations = ShardGenerations.builder(); + final var expectedBlobsToDelete = new HashSet(); + + final var countDownLatch = new CountDownLatch(1); + try (var refs = new RefCountingRunnable(countDownLatch::countDown)) { + for (int index = between(0, 10); index > 0; index--) { + final var indexId = new IndexId(randomIdentifier(), randomUUID()); + for (int shard = between(1, 3); shard > 0; shard--) { + final var shardId = shard; + final var shardGeneration = new ShardGeneration(randomUUID()); + expectedShardGenerations.put(indexId, shard, shardGeneration); + final var blobsToDelete = randomList(10, ESTestCase::randomIdentifier); + final var indexPath = repo.basePath().add("indices").add(indexId.getId()).add(Integer.toString(shard)).buildAsString(); + for (final var blobToDelete : blobsToDelete) { + expectedBlobsToDelete.add(indexPath + blobToDelete); + } + + repo.threadPool() + .generic() + .execute( + ActionRunnable.run( + refs.acquireListener(), + () -> shardBlobsToDelete.addShardDeleteResult(indexId, shardId, shardGeneration, blobsToDelete) + ) + ); + } + } + } + safeAwait(countDownLatch); + assertEquals(expectedShardGenerations.build(), shardBlobsToDelete.getUpdatedShardGenerations()); + shardBlobsToDelete.getBlobPaths().forEachRemaining(s -> assertTrue(expectedBlobsToDelete.remove(s))); + assertThat(expectedBlobsToDelete, empty()); + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index be36d72304bd0..6dab9e802b851 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.mapper.LongFieldScriptTests; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.NestedPathFieldMapper; @@ -1686,7 +1687,8 @@ private static SearchExecutionContext newSearchExecutionContext( null, null, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index fc32080f06fdc..7c09090715e85 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -313,7 +314,8 @@ public void testBuildSearchContextHighlight() throws IOException { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ) { @Override public MappedFieldType getFieldType(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 7113117a4d7fa..3193655b02747 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -156,7 +157,8 @@ public void testBuildRescoreSearchContext() throws ElasticsearchParseException, null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ) { @Override public MappedFieldType getFieldType(String name) { @@ -222,7 +224,8 @@ public void testRewritingKeepsSettings() throws IOException { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ) { @Override public MappedFieldType getFieldType(String name) { diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index e0c12a594bef0..5fcd4eeeb2636 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; @@ -215,7 +216,8 @@ protected final 
SearchExecutionContext createMockSearchExecutionContext(IndexSea null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ) { @Override diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 928cc53751545..a84df1ba2acba 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; @@ -187,7 +188,8 @@ public void testBuild() throws IOException { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); SuggestionContext suggestionContext = suggestionBuilder.build(mockContext); @@ -243,7 +245,8 @@ public void testBuildWithUnmappedField() { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); if (randomBoolean()) { mockContext.setAllowUnmappedFields(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index e5e7e19de0fa4..d1b9a9e4b7e82 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -139,6 +139,7 @@ import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; import org.elasticsearch.index.seqno.RetentionLeaseSyncer; @@ -2194,6 +2195,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { .client(client) .featureService(new FeatureService(List.of(new IndicesFeatures()))) .metaStateService(new MetaStateService(nodeEnv, namedXContentRegistry)) + .mapperMetrics(MapperMetrics.NOOP) .build(); final RecoverySettings recoverySettings = new RecoverySettings(settings, clusterSettings); snapshotShardsService = new SnapshotShardsService( @@ -2371,7 +2373,8 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { namedXContentRegistry, mapperRegistry, indexScopedSettings, - ScriptCompiler.NONE + ScriptCompiler.NONE, + MapperMetrics.NOOP ), shardLimitValidator, EmptySystemIndices.INSTANCE, diff --git a/test/framework/build.gradle b/test/framework/build.gradle index 22a95880193e2..7906a52479b29 100644 --- a/test/framework/build.gradle +++ b/test/framework/build.gradle @@ -26,9 +26,9 @@ dependencies { api "commons-codec:commons-codec:${versions.commonscodec}" // mockito - api 'org.mockito:mockito-core:5.9.0' - api 'org.mockito:mockito-subclass:5.9.0' - api 'net.bytebuddy:byte-buddy:1.14.11' + api 'org.mockito:mockito-core:5.11.0' + api 'org.mockito:mockito-subclass:5.11.0' + api 'net.bytebuddy:byte-buddy:1.14.12' api 'org.objenesis:objenesis:3.3' api "org.elasticsearch:mocksocket:${versions.mocksocket}" diff --git 
a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java index 57c7a34920182..5025299b09b64 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MapperTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.env.Environment; import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; @@ -66,7 +67,8 @@ public static MapperService newMapperService( mapperRegistry, () -> null, indexSettings.getMode().idFieldMapperWithoutFieldData(), - ScriptCompiler.NONE + ScriptCompiler.NONE, + MapperMetrics.NOOP ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index d6e33c43e94c5..c2da7a561c041 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceToParse; @@ -53,7 +54,8 @@ public TranslogHandler(NamedXContentRegistry xContentRegistry, IndexSettings ind mapperRegistry, () -> null, indexSettings.getMode().idFieldMapperWithoutFieldData(), - null + null, + MapperMetrics.NOOP ); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 620db8dc83510..0ede711b1eb56 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -58,6 +58,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.TelemetryPlugin; import org.elasticsearch.plugins.internal.DocumentSizeObserver; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -72,6 +73,7 @@ import org.elasticsearch.search.sort.BucketedSort.ExtraData; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -88,6 +90,7 @@ import java.util.Set; import java.util.function.BooleanSupplier; import java.util.function.Function; +import java.util.function.LongSupplier; import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -219,7 +222,9 @@ protected final MapperService createMapperService(IndexVersion version, Settings throw new UnsupportedOperationException(); }, 
indexSettings.getMode().buildIdFieldMapper(idFieldDataEnabled), - this::compileScript + this::compileScript, + MapperMetrics.NOOP + ); } @@ -237,6 +242,22 @@ protected static IndexSettings createIndexSettings(IndexVersion version, Setting return new IndexSettings(meta, settings); } + protected MapperMetrics createTestMapperMetrics() { + var telemetryProvider = getPlugins().stream() + .filter(p -> p instanceof TelemetryPlugin) + .map(p -> ((TelemetryPlugin) p).getTelemetryProvider(Settings.EMPTY)) + .findFirst() + .orElse(TelemetryProvider.NOOP); + return new MapperMetrics(new SourceFieldMetrics(telemetryProvider.getMeterRegistry(), new LongSupplier() { + private long value = 1; + + @Override + public long getAsLong() { + return value++; + } + })); + } + protected static void withLuceneIndex( MapperService mapperService, CheckedConsumer builder, @@ -670,7 +691,8 @@ public void onRemoval(ShardId shardId, Accountable accountable) { null, () -> true, null, - Collections.emptyMap() + Collections.emptyMap(), + MapperMetrics.NOOP ); } @@ -725,7 +747,7 @@ private void roundTripSyntheticSource(DocumentMapper mapper, String syntheticSou } private static String syntheticSource(DocumentMapper mapper, IndexReader reader, int docId) throws IOException { - SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping()); + SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), SourceFieldMetrics.NOOP); Source synthetic = provider.getSource(getOnlyLeafReader(reader).getContext(), docId); return synthetic.internalSourceRef().utf8ToString(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 5f60e0eedbf03..f9af0d27f3e6f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -1188,7 +1188,7 @@ public final void testSyntheticSourceMany() throws IOException { } try (DirectoryReader reader = DirectoryReader.open(directory)) { int i = 0; - SourceLoader loader = mapper.sourceMapper().newSourceLoader(mapper.mapping()); + SourceLoader loader = mapper.sourceMapper().newSourceLoader(mapper.mapping(), SourceFieldMetrics.NOOP); StoredFieldLoader storedFieldLoader = loader.requiredStoredFields().isEmpty() ? 
StoredFieldLoader.empty() : StoredFieldLoader.create(false, loader.requiredStoredFields()); diff --git a/test/framework/src/main/java/org/elasticsearch/index/query/SearchExecutionContextHelper.java b/test/framework/src/main/java/org/elasticsearch/index/query/SearchExecutionContextHelper.java index 8597025383bf1..3efe2d713f1d1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/query/SearchExecutionContextHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/index/query/SearchExecutionContextHelper.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -43,7 +44,8 @@ public static SearchExecutionContext createSimple( null, () -> true, null, - Collections.emptyMap() + Collections.emptyMap(), + MapperMetrics.NOOP ); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index fe7323aef41a7..a389020cdcde8 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngineFactory; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.seqno.ReplicationTracker; @@ -541,7 +542,8 @@ protected IndexShard newShard( breakerService, IndexModule.DEFAULT_SNAPSHOT_COMMIT_SUPPLIER, relativeTimeSupplier, - null + null, + MapperMetrics.NOOP ); indexShard.addShardFailureCallback(DEFAULT_SHARD_FAILURE_HANDLER); success = true; diff --git a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java deleted file mode 100644 index e105d61f7ee0a..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/indices/SystemIndexThreadPoolTestCase.java +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.indices; - -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.threadpool.ThreadPool; - -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Phaser; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.startsWith; - -/** - * Tests to verify that system indices are bypassing user-space thread pools - * - *
<p>We can block thread pools by setting them to one thread and no queue, then submitting - * threads that wait on a countdown latch. This lets us verify that operations on system indices - * are being directed to other thread pools.</p>
- * - * <p>When implementing this class, don't forget to override {@link ESIntegTestCase#nodePlugins()} if - * the relevant system index is defined in a plugin.</p>
- */ -public abstract class SystemIndexThreadPoolTestCase extends ESIntegTestCase { - - private static final String USER_INDEX = "user_index"; - - // For system indices that use ExecutorNames.CRITICAL_SYSTEM_INDEX_THREAD_POOLS, we'll want to - // block normal system index thread pools as well. - protected Set threadPoolsToBlock() { - return Set.of(ThreadPool.Names.GET, ThreadPool.Names.WRITE, ThreadPool.Names.SEARCH); - } - - protected void runWithBlockedThreadPools(Runnable runnable) { - Phaser phaser = new Phaser(); - Runnable waitAction = () -> { - phaser.arriveAndAwaitAdvance(); - phaser.arriveAndAwaitAdvance(); - }; - phaser.register(); // register this test's thread - - for (String nodeName : internalCluster().getNodeNames()) { - ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); - for (String threadPoolName : threadPoolsToBlock()) { - ThreadPool.Info info = threadPool.info(threadPoolName); - phaser.bulkRegister(info.getMax()); - for (int i = 0; i < info.getMax(); i++) { - threadPool.executor(threadPoolName).submit(waitAction); - } - } - } - phaser.arriveAndAwaitAdvance(); - try { - runnable.run(); - } finally { - phaser.arriveAndAwaitAdvance(); - } - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107625") - public void testUserThreadPoolsAreBlocked() { - assertAcked(client().admin().indices().prepareCreate(USER_INDEX)); - - runWithBlockedThreadPools(this::assertThreadPoolsBlocked); - - assertAcked(client().admin().indices().prepareDelete(USER_INDEX)); - } - - private void assertThreadPoolsBlocked() { - fillThreadPoolQueues(); // rejections are easier to check than timeouts - - var e1 = expectThrows( - EsRejectedExecutionException.class, - () -> client().prepareIndex(USER_INDEX).setSource(Map.of("foo", "bar")).get() - ); - assertThat(e1.getMessage(), startsWith("rejected execution of TimedRunnable")); - var e2 = expectThrows(EsRejectedExecutionException.class, () -> client().prepareGet(USER_INDEX, "id").get()); - assertThat(e2.getMessage(), startsWith("rejected execution of ActionRunnable")); - var e3 = expectThrows( - SearchPhaseExecutionException.class, - () -> client().prepareSearch(USER_INDEX) - .setQuery(QueryBuilders.matchAllQuery()) - // Request times out if max concurrent shard requests is set to 1 - .setMaxConcurrentShardRequests(usually() ? 
SearchRequest.DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS : randomIntBetween(2, 10)) - .get() - ); - assertThat(e3.getMessage(), containsString("all shards failed")); - } - - private void fillThreadPoolQueues() { - for (String nodeName : internalCluster().getNodeNames()) { - ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); - for (String threadPoolName : threadPoolsToBlock()) { - ThreadPool.Info info = threadPool.info(threadPoolName); - - // fill up the queue - for (int i = 0; i < info.getQueueSize().singles(); i++) { - try { - threadPool.executor(threadPoolName).submit(() -> {}); - } catch (EsRejectedExecutionException e) { - // we can't be sure that some other task won't get queued in a test cluster - // but we should put all the tasks in there anyway - } - } - } - } - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 1787638f9fdf3..1f04b60efc8ae 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -94,6 +94,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MappingParserContext; @@ -384,7 +385,8 @@ public void onCache(ShardId shardId, Accountable accountable) {} null, () -> true, valuesSourceRegistry, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ) { @Override public Iterable dimensionFields() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 606bf35d58f14..8f1a0072c9a51 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -48,6 +48,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.CoordinatorRewriteContext; @@ -475,7 +476,8 @@ private static class ServiceHolder implements Closeable { mapperRegistry, () -> createShardContext(null), idxSettings.getMode().idFieldMapperWithoutFieldData(), - ScriptCompiler.NONE + ScriptCompiler.NONE, + MapperMetrics.NOOP ); IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(nodeSettings, new IndexFieldDataCache.Listener() { }); @@ -594,7 +596,8 @@ SearchExecutionContext createShardContext(IndexSearcher searcher) { indexNameMatcher(), () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index 40cdacb767d0f..e05c2dde930a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -354,5 +354,10 @@ public T get(String path) { } return (T) context; } + + @Override + public String toString() { + return "JsonMapView{map=" + map + '}'; + } } } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java index 91d35d79b7c87..0ecc0fdc81e6b 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java @@ -200,6 +200,7 @@ public void testTermsAggregation() throws Exception { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/81941") public void testRestartAfterCompletion() throws Exception { final String initialId; try (SearchResponseIterator it = assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), 0, 2)) { diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index 51e09723bc7ed..481f39d673410 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -59,6 +59,7 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.LongAdder; import java.util.function.IntConsumer; +import java.util.function.LongSupplier; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -323,12 +324,25 @@ private CacheEntry(T chunk) { private final Runnable evictIncrementer; + private final LongSupplier relativeTimeInNanosSupplier; + public SharedBlobCacheService( NodeEnvironment environment, Settings settings, ThreadPool threadPool, String ioExecutor, BlobCacheMetrics blobCacheMetrics + ) { + this(environment, settings, threadPool, ioExecutor, blobCacheMetrics, threadPool::relativeTimeInNanos); + } + + public SharedBlobCacheService( + NodeEnvironment environment, + Settings settings, + ThreadPool threadPool, + String ioExecutor, + BlobCacheMetrics blobCacheMetrics, + LongSupplier relativeTimeInNanosSupplier ) { this.threadPool = threadPool; this.ioExecutor = threadPool.executor(ioExecutor); @@ -370,6 +384,7 @@ public SharedBlobCacheService( this.blobCacheMetrics = blobCacheMetrics; this.evictIncrementer = blobCacheMetrics.getEvictedCountNonZeroFrequency()::increment; + this.relativeTimeInNanosSupplier = relativeTimeInNanosSupplier; } public static long calculateCacheSize(Settings settings, long totalFsSize) { @@ -1068,7 +1083,7 @@ public int populateAndRead( assert assertOffsetsWithinFileLength(rangeToRead.start(), rangeToRead.length(), length); // We are interested in the total time that the system spends when fetching a result (including time spent queuing), so we start // our measurement here. 
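// (Editorial note, kept as Java comments so the hunk still reads top to bottom: the two
// replacements below route timing through the injected relativeTimeInNanosSupplier instead
// of calling threadPool.relativeTimeInNanos() directly, which lets a test supply a fake clock.
// A minimal, hypothetical usage sketch based only on the six-argument overload added above:
//   AtomicLong fakeNanoClock = new AtomicLong();
//   var cache = new SharedBlobCacheService<String>(
//       nodeEnvironment, settings, threadPool, ioExecutorName,
//       BlobCacheMetrics.NOOP, fakeNanoClock::get);
//   fakeNanoClock.addAndGet(TimeUnit.MILLISECONDS.toNanos(5)); // deterministically "advance" time
// Names like fakeNanoClock and ioExecutorName are illustrative, and BlobCacheMetrics.NOOP is
// assumed to be the no-op metrics instance; nothing here is part of the patch itself.)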
- final long startTime = threadPool.relativeTimeInNanos(); + final long startTime = relativeTimeInNanosSupplier.getAsLong(); RangeMissingHandler writerInstrumentationDecorator = ( SharedBytes.IO channel, int channelPos, @@ -1076,7 +1091,7 @@ public int populateAndRead( int length, IntConsumer progressUpdater) -> { writer.fillCacheRange(channel, channelPos, relativePos, length, progressUpdater); - var elapsedTime = TimeUnit.NANOSECONDS.toMicros(threadPool.relativeTimeInNanos() - startTime); + var elapsedTime = TimeUnit.NANOSECONDS.toMicros(relativeTimeInNanosSupplier.getAsLong() - startTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment(); }; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java index c23d75f029372..95fa595411710 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java @@ -45,11 +45,6 @@ public Request(TimeValue masterNodeTimeout, TimeValue requestTimeout) { this.requestTimeout = Objects.requireNonNull(requestTimeout); } - @Deprecated(forRemoval = true) // temporary compatibility shi - public Request(TimeValue timeout) { - this(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); - } - public Request(StreamInput in) throws IOException { super(in); this.requestTimeout = in.readTimeValue(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java index a4d5a60094645..9dfa98607851a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetBitsetCacheTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MockFieldMapper; @@ -601,7 +602,8 @@ private TestIndexContext testIndex(MappingLookup mappingLookup, Client client) t null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); context = new TestIndexContext(directory, iw, directoryReader, searchExecutionContext, leaf); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index 6342f573a838c..c5b5470856c7b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -32,6 +32,7 @@ import 
org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperMetrics; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MockFieldMapper; @@ -104,7 +105,8 @@ public void testDLS() throws Exception { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); SearchExecutionContext searchExecutionContext = spy(realSearchExecutionContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); @@ -261,7 +263,8 @@ public void testDLSWithLimitedPermissions() throws Exception { null, () -> true, null, - emptyMap() + emptyMap(), + MapperMetrics.NOOP ); SearchExecutionContext searchExecutionContext = spy(realSearchExecutionContext); DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor()); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md index dad5ae2828174..fc68fe9567f2b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/README.md @@ -164,19 +164,26 @@ Finally, this'll appear in the docs as a table kind of like this: CSV-SPEC tests run against half-upgraded clusters in the `x-pack:plugin:esql:qa:server:mixed-cluster` project and will fail if they test -new behavior against an old node. To stop them from running you should create -a `NodeFeature` in `EsqlFeatures` for your change. Then you can skip it by -adding a `required_feature` to your test like so: +new behavior against an old node. To stop them from running you should add an +entry to the list of capabilities in `EsqlCapabilities` for your change. +Then you can skip it by adding a `required_capability` to your test like so: ```csv-spec mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); ``` -That skips nodes that don't have the `esql.mv_sort` feature. +That skips nodes that don't have the `mv_sort` capability. +NOTE: It is also possible to do this by creating a `NodeFeature` in `EsqlFeatures` for your change. +In that case the feature should be prefixed with `esql.`, but this prefix should +not be referenced in the test. For example, the feature `esql.mv_sort` should +cause a test to be skipped using the same `required_capability: mv_sort` above. +It is preferable to use `EsqlCapabilities` for new features, although all existing +`EsqlFeatures` will continue to work. It is not possible to remove an existing +`EsqlFeature` without breaking backwards compatibility. 
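For reference, registering the capability itself is a one-line change. The sketch below is illustrative only, since the exact shape of `EsqlCapabilities` is not shown in this README; only the capability *name* matters, because csv-spec tests match it via `required_capability: mv_sort`.

```java
import java.util.Set;

// Illustrative sketch -- the real declaration lives in EsqlCapabilities,
// whose exact structure is an assumption here.
public class EsqlCapabilitiesSketch {
    public static final Set<String> CAPABILITIES = Set.of("mv_sort");
}
```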
### Warnings diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index d55615e9df48a..b23f6f188d8c5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -44,7 +44,7 @@ public class CohereServiceSettings extends FilteredXContentObject implements Ser public static final String OLD_MODEL_ID_FIELD = "model"; public static final String MODEL_ID = "model_id"; private static final Logger logger = LogManager.getLogger(CohereServiceSettings.class); - // The rate limit defined here is pulled for the blog: https://txt.cohere.com/free-developer-tier-announcement/ for the production tier + // Production key rate limits for all endpoints: https://docs.cohere.com/docs/going-live#production-key-specifications // 10K requests a minute private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(10_000); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java index 5a51e89f57e11..060dc23b935cc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/EmptyTaskSettingsTests.java @@ -29,6 +29,7 @@ protected EmptyTaskSettings createTestInstance() { @Override protected EmptyTaskSettings mutateInstance(EmptyTaskSettings instance) { + // All instances are the same and have no fields, nothing to mutate return null; } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java index 7016122fedcf8..428dbca892438 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionResponseTests.java @@ -53,7 +53,7 @@ protected InferenceAction.Response createTestInstance() { @Override protected InferenceAction.Response mutateInstance(InferenceAction.Response instance) throws IOException { - return null; + return randomValueOtherThan(instance, this::createTestInstance); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java index 9484763912cda..4be38b9d0e9d0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedSparseEmbeddingResultsTests.java @@ -132,6 +132,6 @@ protected ChunkedSparseEmbeddingResults createTestInstance() { @Override protected ChunkedSparseEmbeddingResults mutateInstance(ChunkedSparseEmbeddingResults instance) throws IOException { - return null; + return randomValueOtherThan(instance, 
ChunkedSparseEmbeddingResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java index c908d2c85f620..05b86217862e9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingByteResultsTests.java @@ -131,6 +131,6 @@ protected ChunkedTextEmbeddingByteResults createTestInstance() { @Override protected ChunkedTextEmbeddingByteResults mutateInstance(ChunkedTextEmbeddingByteResults instance) throws IOException { - return null; + return randomValueOtherThan(instance, ChunkedTextEmbeddingByteResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java index 9b18f5536713e..966a639713ba6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingFloatResultsTests.java @@ -51,6 +51,6 @@ protected ChunkedTextEmbeddingFloatResults createTestInstance() { @Override protected ChunkedTextEmbeddingFloatResults mutateInstance(ChunkedTextEmbeddingFloatResults instance) throws IOException { - return null; + return randomValueOtherThan(instance, this::createTestInstance); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java index 9e827b51d50f6..fd0c8b19aae24 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/ChunkedTextEmbeddingResultsTests.java @@ -159,6 +159,6 @@ protected ChunkedTextEmbeddingResults createTestInstance() { @Override protected ChunkedTextEmbeddingResults mutateInstance(ChunkedTextEmbeddingResults instance) throws IOException { - return null; + return randomValueOtherThan(instance, ChunkedTextEmbeddingResultsTests::createRandomResults); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 7d0d076a0a22c..bf9fdbe7235b6 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -95,6 +95,7 @@ public void testRemoveAsTypeWithInCorrectType() { e.getMessage(), containsString("field [a] is not of the expected type. 
The value [5] cannot be converted to a [String]") ); + assertNull(map.get("a")); e = expectThrows(ElasticsearchStatusException.class, () -> ServiceUtils.removeAsType(map, "b", Boolean.class)); assertThat( @@ -124,7 +125,7 @@ public void testRemoveAsTypeWithInCorrectType() { e.getMessage(), containsString("field [e] is not of the expected type. The value [5] cannot be converted to a [Double]") ); - assertNull(map.get("d")); + assertNull(map.get("e")); assertThat(map.entrySet(), empty()); } @@ -140,6 +141,7 @@ public void testRemoveAsType_Validation_WithInCorrectType() { validationException.validationErrors().get(0), containsString("field [a] is not of the expected type. The value [5] cannot be converted to a [String]") ); + assertNull(map.get("a")); validationException = new ValidationException(); ServiceUtils.removeAsType(map, "b", Boolean.class, validationException); @@ -180,14 +182,14 @@ public void testRemoveAsType_Validation_WithInCorrectType() { validationException.validationErrors().get(0), containsString("field [e] is not of the expected type. The value [5] cannot be converted to a [Double]") ); - assertNull(map.get("d")); + assertNull(map.get("e")); assertThat(map.entrySet(), empty()); } public void testRemoveAsTypeMissingReturnsNull() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); - assertNull(ServiceUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); + assertNull(ServiceUtils.removeAsType(map, "missing", Integer.class)); assertThat(map.entrySet(), hasSize(3)); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java index d2b83d7b14e2b..bfff26bcaefc0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettingsTests.java @@ -143,7 +143,7 @@ protected AzureOpenAiSecretSettings createTestInstance() { @Override protected AzureOpenAiSecretSettings mutateInstance(AzureOpenAiSecretSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, AzureOpenAiSecretSettingsTests::createRandom); } public static Map getAzureOpenAiSecretSettingsMap(@Nullable String apiKey, @Nullable String entraId) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java index cbaa41c37958d..46e514c8b16c4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -87,6 +87,6 @@ protected AzureOpenAiCompletionServiceSettings createTestInstance() { @Override protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, 
AzureOpenAiCompletionServiceSettingsTests::createRandom); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java index 7f0e730b8835c..15e1d8d7809c5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -94,6 +94,6 @@ protected AzureOpenAiCompletionTaskSettings createTestInstance() { @Override protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { - return createRandomWithUser(); + return randomValueOtherThan(instance, AzureOpenAiCompletionTaskSettingsTests::createRandomWithUser); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java index 7c56ffad27c80..f4c6f9b2a4f07 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettingsTests.java @@ -423,7 +423,7 @@ protected AzureOpenAiEmbeddingsServiceSettings createTestInstance() { @Override protected AzureOpenAiEmbeddingsServiceSettings mutateInstance(AzureOpenAiEmbeddingsServiceSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, AzureOpenAiEmbeddingsServiceSettingsTests::createRandom); } public static Map getPersistentAzureOpenAiServiceSettingsMap( diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java index cc2d8b9b67620..324bdd15d9256 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsTaskSettingsTests.java @@ -92,7 +92,7 @@ protected AzureOpenAiEmbeddingsTaskSettings createTestInstance() { @Override protected AzureOpenAiEmbeddingsTaskSettings mutateInstance(AzureOpenAiEmbeddingsTaskSettings instance) throws IOException { - return createRandomWithUser(); + return randomValueOtherThan(instance, AzureOpenAiEmbeddingsTaskSettingsTests::createRandomWithUser); } public static Map getAzureOpenAiRequestTaskSettingsMap(@Nullable String user) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index 
a010f63802052..303ed1cab2c50 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -275,7 +275,7 @@ protected CohereServiceSettings createTestInstance() { @Override protected CohereServiceSettings mutateInstance(CohereServiceSettings instance) throws IOException { - return null; + return randomValueOtherThan(instance, CohereServiceSettingsTests::createRandom); } public static Map getServiceSettingsMap(@Nullable String url, @Nullable String model) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index a306a3e660cd9..6f8fe6344b57f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -358,7 +358,7 @@ protected CohereEmbeddingsServiceSettings createTestInstance() { @Override protected CohereEmbeddingsServiceSettings mutateInstance(CohereEmbeddingsServiceSettings instance) throws IOException { - return null; + return randomValueOtherThan(instance, CohereEmbeddingsServiceSettingsTests::createRandom); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java index 4f5d872f09eb8..c18310eb9a84a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettingsTests.java @@ -156,7 +156,7 @@ protected CohereEmbeddingsTaskSettings createTestInstance() { @Override protected CohereEmbeddingsTaskSettings mutateInstance(CohereEmbeddingsTaskSettings instance) throws IOException { - return null; + return randomValueOtherThan(instance, CohereEmbeddingsTaskSettingsTests::createRandom); } public static Map getTaskSettingsMapEmpty() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java index cb30077fec174..4943ddf74fda1 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankServiceSettingsTests.java @@ -77,7 +77,7 @@ protected CohereRerankServiceSettings createTestInstance() { @Override protected CohereRerankServiceSettings mutateInstance(CohereRerankServiceSettings instance) throws IOException { - return null; + return randomValueOtherThan(instance, CohereRerankServiceSettingsTests::createRandom); } @Override diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java index d81c94a0dedda..9d92f756dd31c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceSettingsTests.java @@ -197,7 +197,7 @@ protected HuggingFaceServiceSettings createTestInstance() { @Override protected HuggingFaceServiceSettings mutateInstance(HuggingFaceServiceSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, HuggingFaceServiceSettingsTests::createRandom); } public static Map getServiceSettingsMap(String url) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java index 2b8281da8db13..f69a9b5a967e0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserSecretSettingsTests.java @@ -77,6 +77,6 @@ protected HuggingFaceElserSecretSettings createTestInstance() { @Override protected HuggingFaceElserSecretSettings mutateInstance(HuggingFaceElserSecretSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, HuggingFaceElserSecretSettingsTests::createRandom); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java index eadefddecce70..bd6a5007b72ee 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettingsTests.java @@ -122,6 +122,6 @@ protected HuggingFaceElserServiceSettings createTestInstance() { @Override protected HuggingFaceElserServiceSettings mutateInstance(HuggingFaceElserServiceSettings instance) throws IOException { - return createRandom(); + return randomValueOtherThan(instance, HuggingFaceElserServiceSettingsTests::createRandom); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java index b9b4310699d07..75ea63eba8a34 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java @@ -236,7 +236,7 @@ 
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java
index b9b4310699d07..75ea63eba8a34 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionServiceSettingsTests.java
@@ -236,7 +236,7 @@ protected OpenAiChatCompletionServiceSettings createTestInstance() {
 
     @Override
     protected OpenAiChatCompletionServiceSettings mutateInstance(OpenAiChatCompletionServiceSettings instance) throws IOException {
-        return createRandomWithNonNullUrl();
+        return randomValueOtherThan(instance, OpenAiChatCompletionServiceSettingsTests::createRandomWithNonNullUrl);
     }
 
     private static OpenAiChatCompletionServiceSettings createRandomWithNonNullUrl() {
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java
index c964d2643459d..1be70ee586835 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettingsTests.java
@@ -441,7 +441,7 @@ protected OpenAiEmbeddingsServiceSettings createTestInstance() {
 
     @Override
     protected OpenAiEmbeddingsServiceSettings mutateInstance(OpenAiEmbeddingsServiceSettings instance) throws IOException {
-        return createRandomWithNonNullUrl();
+        return randomValueOtherThan(instance, OpenAiEmbeddingsServiceSettingsTests::createRandomWithNonNullUrl);
     }
 
     public static Map<String, Object> getServiceSettingsMap(String modelId, @Nullable String url, @Nullable String org) {
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java
index c5a510ef9de0c..464f5a1885d99 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java
@@ -97,7 +97,7 @@ protected OpenAiEmbeddingsTaskSettings createTestInstance() {
 
     @Override
     protected OpenAiEmbeddingsTaskSettings mutateInstance(OpenAiEmbeddingsTaskSettings instance) throws IOException {
-        return createRandomWithUser();
+        return randomValueOtherThan(instance, OpenAiEmbeddingsTaskSettingsTests::createRandomWithUser);
     }
 
     public static Map<String, Object> getTaskSettingsMap(@Nullable String user) {
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java
index bd7a3ef4dcf03..212a867349e5c 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettingsTests.java
@@ -75,7 +75,7 @@ protected DefaultSecretSettings createTestInstance() {
 
     @Override
     protected DefaultSecretSettings mutateInstance(DefaultSecretSettings instance) throws IOException {
-        return createRandom();
+        return randomValueOtherThan(instance, DefaultSecretSettingsTests::createRandom);
     }
 
     public static Map<String, Object> getSecretSettingsMap(String apiKey) {
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java
index 65bcaca981020..cdee7c452ff52 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java
@@ -100,6 +100,6 @@ protected RateLimitSettings createTestInstance() {
 
     @Override
     protected RateLimitSettings mutateInstance(RateLimitSettings instance) throws IOException {
-        return createRandom();
+        return randomValueOtherThan(instance, RateLimitSettingsTests::createRandom);
     }
 }
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
index 665548c432ca0..7ede898fa0425 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
@@ -42,6 +42,8 @@
 import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
 import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction;
 import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction;
+import org.elasticsearch.xpack.rollup.action.RollupInfoTransportAction;
+import org.elasticsearch.xpack.rollup.action.RollupUsageTransportAction;
 import org.elasticsearch.xpack.rollup.action.TransportDeleteRollupJobAction;
 import org.elasticsearch.xpack.rollup.action.TransportGetRollupCapsAction;
 import org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction;
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupInfoTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportAction.java
similarity index 95%
rename from x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupInfoTransportAction.java
rename to x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportAction.java
index 9bdb514ea5b30..0bbd27c7281de 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupInfoTransportAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.rollup;
+package org.elasticsearch.xpack.rollup.action;
 
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.common.inject.Inject;
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupUsageTransportAction.java
similarity index 98%
rename from x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java
rename to x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupUsageTransportAction.java
index c3b568fc32b71..c711553c99a17 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupUsageTransportAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/RollupUsageTransportAction.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.rollup;
+package org.elasticsearch.xpack.rollup.action;
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportActionTests.java
similarity index 98%
rename from x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java
rename to x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportActionTests.java
index 243b478db6dbf..d2304b2c7d9a3 100644
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupInfoTransportActionTests.java
+++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/RollupInfoTransportActionTests.java
@@ -4,7 +4,7 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-package org.elasticsearch.xpack.rollup;
+package org.elasticsearch.xpack.rollup.action;
 
 import org.elasticsearch.action.support.ActionFilters;
 import org.elasticsearch.action.support.PlainActionFuture;
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
index 25e03c6d87e34..7cc91966f39bd 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java
@@ -44,6 +44,7 @@
 import org.elasticsearch.index.SlowLogFieldProvider;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.engine.InternalEngineFactory;
+import org.elasticsearch.index.mapper.MapperMetrics;
 import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.license.ClusterStateLicenseService;
 import org.elasticsearch.license.License;
@@ -374,7 +375,8 @@ public void testOnIndexModuleIsNoOpWithSecurityDisabled() throws Exception {
             () -> true,
             TestIndexNameExpressionResolver.newInstance(threadPool.getThreadContext()),
             Collections.emptyMap(),
-            mock(SlowLogFieldProvider.class)
+            mock(SlowLogFieldProvider.class),
+            MapperMetrics.NOOP
         );
         security.onIndexModule(indexModule);
         // indexReaderWrapper is a SetOnce so if Security#onIndexModule had already set an ReaderWrapper we would get an exception here
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
index bee2d6aa22355..70896a67a9468 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.index.SlowLogFieldProvider;
 import org.elasticsearch.index.analysis.AnalysisRegistry;
 import org.elasticsearch.index.engine.InternalEngineFactory;
+import org.elasticsearch.index.mapper.MapperMetrics;
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.indices.TestIndexNameExpressionResolver;
 import org.elasticsearch.plugins.Plugin;
@@ -68,7 +69,8 @@ public void testWatcherDisabledTests() throws Exception {
             () -> true,
             TestIndexNameExpressionResolver.newInstance(),
             Collections.emptyMap(),
-            mock(SlowLogFieldProvider.class)
+            mock(SlowLogFieldProvider.class),
+            MapperMetrics.NOOP
         );
         // this will trip an assertion if the watcher indexing operation listener is null (which it is) but we try to add it
         watcher.onIndexModule(indexModule);
diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
index 98f5daec730bb..07d4491daedf6 100644
--- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
+++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
@@ -60,6 +60,7 @@
 import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperBuilderContext;
+import org.elasticsearch.index.mapper.MapperMetrics;
 import org.elasticsearch.index.mapper.MapperTestCase;
 import org.elasticsearch.index.mapper.Mapping;
 import org.elasticsearch.index.mapper.MappingLookup;
@@ -1107,7 +1108,8 @@ protected final SearchExecutionContext createMockContext() {
             null,
             () -> true,
             null,
-            emptyMap()
+            emptyMap(),
+            MapperMetrics.NOOP
         ) {
             @Override
             public MappedFieldType getFieldType(String name) {
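The remaining hunks thread `MapperMetrics.NOOP` through constructors that previously took no metrics argument. A hedged sketch of the null-object pattern this relies on; the interface and method names below are illustrative stand-ins, since the diff does not show the real `MapperMetrics` internals:

    // Illustrative null-object metrics, not the actual MapperMetrics class.
    interface FieldLoadMetrics {
        // Production code records unconditionally; tests and benchmarks pass NOOP.
        FieldLoadMetrics NOOP = nanos -> { /* intentionally discard the measurement */ };

        void recordLoadTime(long nanos);
    }

Passing a shared no-op constant keeps call sites uniform and spares tests and benchmarks from null checks or mock wiring.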