diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy index 144307912101c..237aa99e4b824 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTarFuncTest.groovy @@ -8,6 +8,8 @@ package org.elasticsearch.gradle.internal +import spock.lang.Ignore + import org.apache.commons.compress.archivers.tar.TarArchiveEntry import org.apache.commons.compress.archivers.tar.TarArchiveInputStream import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream @@ -36,6 +38,11 @@ class SymbolicLinkPreservingTarFuncTest extends AbstractGradleFuncTest { final Path linkToRealFolder = archiveSourceRoot.resolve("link-to-real-folder"); Files.createSymbolicLink(linkToRealFolder, Paths.get("./real-folder")); + final Path realFolder2 = testProjectDir.getRoot().toPath().resolve("real-folder2") + final Path realFolderSub = realFolder2.resolve("sub") + Files.createDirectory(realFolder2); + Files.createDirectory(realFolderSub); + buildFile << """ import org.elasticsearch.gradle.internal.SymbolicLinkPreservingTar @@ -56,6 +63,12 @@ tasks.register("buildBZip2Tar", SymbolicLinkPreservingTar) { SymbolicLinkPreserv tar.compression = Compression.BZIP2 tar.preserveFileTimestamps = ${preserverTimestamp} from fileTree("archiveRoot") + + into('config') { + dirMode 0750 + fileMode 0660 + from "real-folder2" + } } """ when: @@ -118,14 +131,20 @@ tasks.register("buildTar", SymbolicLinkPreservingTar) { SymbolicLinkPreservingTa if (entry.getName().equals("real-folder/")) { assert entry.isDirectory() realFolderEntry = true - } else if (entry.getName().equals("real-folder/file")) { + } else if 
(entry.getName().equals("real-folder/file")) { assert entry.isFile() fileEntry = true } else if (entry.getName().equals("real-folder/link-to-file")) { assert entry.isSymbolicLink() assert normalized(entry.getLinkName()) == "./file" linkToFileEntry = true - } else if (entry.getName().equals("link-in-folder/")) { + } else if (entry.getName().equals("config/")) { + assert entry.isDirectory() + assert entry.getMode() == 16877 + } else if (entry.getName().equals("config/sub/")) { + assert entry.isDirectory() + assert entry.getMode() == 16872 + }else if (entry.getName().equals("link-in-folder/")) { assert entry.isDirectory() linkInFolderEntry = true } else if (entry.getName().equals("link-in-folder/link-to-file")) { diff --git a/docs/changelog/102428.yaml b/docs/changelog/102428.yaml new file mode 100644 index 0000000000000..275492fa6a888 --- /dev/null +++ b/docs/changelog/102428.yaml @@ -0,0 +1,5 @@ +pr: 102428 +summary: "ESQL: Add option to drop null fields" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103399.yaml b/docs/changelog/103399.yaml new file mode 100644 index 0000000000000..440ac90b313f5 --- /dev/null +++ b/docs/changelog/103399.yaml @@ -0,0 +1,6 @@ +pr: 103399 +summary: "add validation on _id field when upsert new doc" +area: Search +type: bug +issues: + - 102981 diff --git a/docs/changelog/103763.yaml b/docs/changelog/103763.yaml new file mode 100644 index 0000000000000..e4d6556c77077 --- /dev/null +++ b/docs/changelog/103763.yaml @@ -0,0 +1,6 @@ +pr: 103763 +summary: Ref count search response bytes +area: Search +type: enhancement +issues: + - 102657 diff --git a/docs/changelog/104006.yaml b/docs/changelog/104006.yaml new file mode 100644 index 0000000000000..d840502cdefbe --- /dev/null +++ b/docs/changelog/104006.yaml @@ -0,0 +1,5 @@ +pr: 104006 +summary: Add support for more than one `inner_hit` when searching nested vectors +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/changelog/104033.yaml 
b/docs/changelog/104033.yaml new file mode 100644 index 0000000000000..d3e167665732c --- /dev/null +++ b/docs/changelog/104033.yaml @@ -0,0 +1,5 @@ +pr: 104033 +summary: Add Query Users API +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/104087.yaml b/docs/changelog/104087.yaml new file mode 100644 index 0000000000000..614e2d0de7e58 --- /dev/null +++ b/docs/changelog/104087.yaml @@ -0,0 +1,13 @@ +pr: 104087 +summary: Deprecate machine learning on Intel macOS +area: Machine Learning +type: deprecation +issues: [] +deprecation: + title: Deprecate machine learning on Intel macOS + area: Packaging + details: The machine learning plugin will be permanently disabled on macOS x86_64 + in new minor versions released from December 2024 onwards. + impact: To continue to use machine learning functionality on macOS please switch to + an arm64 machine (Apple silicon). Alternatively, it will still be possible to run + Elasticsearch with machine learning enabled in a Docker container on macOS x86_64. 
diff --git a/docs/changelog/104182.yaml b/docs/changelog/104182.yaml new file mode 100644 index 0000000000000..b5cf10f941cc6 --- /dev/null +++ b/docs/changelog/104182.yaml @@ -0,0 +1,5 @@ +pr: 104182 +summary: "Apm-data: fix `@custom` component templates" +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/104269.yaml b/docs/changelog/104269.yaml new file mode 100644 index 0000000000000..8d4b0fc5d5198 --- /dev/null +++ b/docs/changelog/104269.yaml @@ -0,0 +1,5 @@ +pr: 104269 +summary: "ESQL: Support loading shapes from source into WKB blocks" +area: "ES|QL" +type: enhancement +issues: [] diff --git a/docs/changelog/104355.yaml b/docs/changelog/104355.yaml new file mode 100644 index 0000000000000..2a100faf3c35f --- /dev/null +++ b/docs/changelog/104355.yaml @@ -0,0 +1,5 @@ +pr: 104355 +summary: Prepare enrich plan to support multi clusters +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/104386.yaml b/docs/changelog/104386.yaml new file mode 100644 index 0000000000000..41b6a17424bbd --- /dev/null +++ b/docs/changelog/104386.yaml @@ -0,0 +1,6 @@ +pr: 104386 +summary: "X-pack/plugin/apm-data: add dynamic setting for enabling template registry" +area: Data streams +type: enhancement +issues: + - 104385 diff --git a/docs/changelog/104396.yaml b/docs/changelog/104396.yaml new file mode 100644 index 0000000000000..586fdc1b22624 --- /dev/null +++ b/docs/changelog/104396.yaml @@ -0,0 +1,5 @@ +pr: 104396 +summary: Report current master in `PeerFinder` +area: Cluster Coordination +type: enhancement +issues: [] diff --git a/docs/changelog/104406.yaml b/docs/changelog/104406.yaml new file mode 100644 index 0000000000000..d26ef664abc07 --- /dev/null +++ b/docs/changelog/104406.yaml @@ -0,0 +1,5 @@ +pr: 104406 +summary: Support patch transport version from 8.12 +area: Downsampling +type: enhancement +issues: [] diff --git a/docs/changelog/104407.yaml b/docs/changelog/104407.yaml new file mode 100644 index 0000000000000..1ce6b6f97f580 --- 
/dev/null +++ b/docs/changelog/104407.yaml @@ -0,0 +1,6 @@ +pr: 104407 +summary: Set read timeout for fetching IMDSv2 token +area: Discovery-Plugins +type: enhancement +issues: + - 104244 diff --git a/docs/changelog/104418.yaml b/docs/changelog/104418.yaml new file mode 100644 index 0000000000000..d27b66cebea87 --- /dev/null +++ b/docs/changelog/104418.yaml @@ -0,0 +1,6 @@ +pr: 104418 +summary: Fix `routing_path` when template has multiple `path_match` and multi-fields +area: TSDB +type: bug +issues: + - 104400 diff --git a/docs/changelog/104460.yaml b/docs/changelog/104460.yaml new file mode 100644 index 0000000000000..c92acdd5cb8ad --- /dev/null +++ b/docs/changelog/104460.yaml @@ -0,0 +1,5 @@ +pr: 104460 +summary: Dynamically adjust node metrics cache expire +area: Search +type: enhancement +issues: [] diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index bbfa41538528a..e1e27be12a36f 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -43,6 +43,12 @@ or alias you search. (Optional, string) Separator for CSV results. Defaults to `,`. The API only supports this parameter for CSV responses. +`drop_null_columns`:: +(Optional, boolean) Should columns that are entirely `null` be removed from +the `columns` and `values` portion of the results? Defaults to `false`. If +`true` the response will include an extra section under the name +`all_columns` which has the name of all columns. + +`format`:: (Optional, string) Format for the response. For valid values, refer to <>. @@ -75,17 +81,12 @@ responses. See <>. `columns`:: (array of objects) -Column headings for the search results. Each object is a column. -+ -.Properties of `columns` objects -[%collapsible%open] -===== -`name`:: -(string) Name of the column. - -`type`:: -(string) Data type for the column. -===== +Column `name` and `type` for each column returned in `values`. Each object is a single column.
+ +`all_columns`:: +(array of objects) +Column `name` and `type` for each queried column. Each object is a single column. This is only +returned if `drop_null_columns` is sent with the request. `rows`:: (array of arrays) diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 440e66d11096e..a2e7119bab05d 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -3,9 +3,11 @@ v | result boolean | integer cartesian_point | integer +cartesian_shape | integer datetime | integer double | integer geo_point | integer +geo_shape | integer integer | integer ip | integer keyword | integer diff --git a/docs/reference/esql/functions/types/mv_first.asciidoc b/docs/reference/esql/functions/types/mv_first.asciidoc index e6c67a454b96b..620c7cf13b771 100644 --- a/docs/reference/esql/functions/types/mv_first.asciidoc +++ b/docs/reference/esql/functions/types/mv_first.asciidoc @@ -3,9 +3,11 @@ v | result boolean | boolean cartesian_point | cartesian_point +cartesian_shape | cartesian_shape datetime | datetime double | double geo_point | geo_point +geo_shape | geo_shape integer | integer ip | ip keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_last.asciidoc b/docs/reference/esql/functions/types/mv_last.asciidoc index e6c67a454b96b..620c7cf13b771 100644 --- a/docs/reference/esql/functions/types/mv_last.asciidoc +++ b/docs/reference/esql/functions/types/mv_last.asciidoc @@ -3,9 +3,11 @@ v | result boolean | boolean cartesian_point | cartesian_point +cartesian_shape | cartesian_shape datetime | datetime double | double geo_point | geo_point +geo_shape | geo_shape integer | integer ip | ip keyword | keyword diff --git a/docs/reference/esql/functions/types/to_string.asciidoc b/docs/reference/esql/functions/types/to_string.asciidoc index 4de4af735b07f..773e396f41373 100644 --- 
a/docs/reference/esql/functions/types/to_string.asciidoc +++ b/docs/reference/esql/functions/types/to_string.asciidoc @@ -3,9 +3,11 @@ v | result boolean | keyword cartesian_point | keyword +cartesian_shape | keyword datetime | keyword double | keyword geo_point | keyword +geo_shape | keyword integer | keyword ip | keyword keyword | keyword diff --git a/docs/reference/ingest/processors.asciidoc b/docs/reference/ingest/processors.asciidoc index 4132773e3d427..8622e0b98602c 100644 --- a/docs/reference/ingest/processors.asciidoc +++ b/docs/reference/ingest/processors.asciidoc @@ -4,7 +4,15 @@ Processor reference ++++ -{es} includes several configurable processors. To get a list of available +An <> is made up of a sequence of processors that are applied to documents as they are ingested into an index. +Each processor performs a specific task, such as filtering, transforming, or enriching data. + +Each successive processor depends on the output of the previous processor, so the order of processors is important. +The modified documents are indexed into {es} after all processors are applied. + +{es} includes over 40 configurable processors. +The subpages in this section contain reference documentation for each processor. +To get a list of available processors, use the <> API. [source,console] @@ -12,11 +20,191 @@ processors, use the <> API. GET _nodes/ingest?filter_path=nodes.*.ingest.processors ---- -The pages in this section contain reference documentation for each processor. +[discrete] +[[ingest-processors-categories]] +=== Ingest processors by category + +We've categorized the available processors on this page and summarized their functions. +This will help you find the right processor for your use case. + +* <> +* <> +* <> +* <> +* <> + +[discrete] +[[ingest-process-category-data-enrichment]] +=== Data enrichment processors + +[discrete] +[[ingest-process-category-data-enrichment-general]] +==== General outcomes + +<>:: +Appends a value to a field. 
+ +<>:: +Points documents to the right time-based index based on a date or timestamp field. + +<>:: +Enriches documents with data from another index. +[TIP] +==== +Refer to <> for detailed examples of how to use the `enrich` processor to add data from your existing indices to incoming documents during ingest. +==== + +<>:: +Uses {ml} to classify and tag text fields. + +[discrete] +[[ingest-process-category-data-enrichment-specific]] +==== Specific outcomes + +<>:: +Parses and indexes binary data, such as PDFs and Word documents. + +<>:: +Converts a location field to a Geo-Point field. + +<>:: +Computes the Community ID for network flow data. + +<>:: +Computes a hash of the document’s content. + +<>:: +Converts geo-grid definitions of grid tiles or cells to regular bounding boxes or polygons which describe their shape. + +<>:: +Adds information about the geographical location of an IPv4 or IPv6 address. + +<>:: +Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. + +<>:: +Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). + +<>:: +Sets user-related details (such as `username`, `roles`, `email`, `full_name`,`metadata`, `api_key`, `realm` and `authentication_type`) from the current authenticated user to the current document by pre-processing the ingest. + +<>:: +Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. + +<>:: +URL-decodes a string. + +<>:: +Parses user-agent strings to extract information about web clients. + +[discrete] +[[ingest-process-category-data-transformation]] +=== Data transformation processors + +[discrete] +[[ingest-process-category-data-transformation-general]] +==== General outcomes + +<>:: +Converts a field in the currently ingested document to a different type, such as converting a string to an integer. 
+ +<>:: +Extracts structured fields out of a single text field within a document. +Unlike the <>, dissect does not use regular expressions. +This makes dissect a simpler and often faster alternative. + +<>:: +Extracts structured fields out of a single text field within a document, using the <> regular expression dialect that supports reusable aliased expressions. + +<>:: +Converts a string field by applying a regular expression and a replacement. + +<>:: +Uses the <> rules engine to obscure text in the input document matching the given Grok patterns. + +<>:: +Renames an existing field. + +<>:: +Sets a value on a field. + +[discrete] +[[ingest-process-category-data-transformation-specific]] +==== Specific outcomes + +<>:: +Converts a human-readable byte value to its value in bytes (for example `1kb` becomes `1024`). + +<>:: +Extracts a single line of CSV data from a text field. + +<>:: +Extracts and converts date fields. + +<> processor:: +Expands a field with dots into an object field. + +<>:: +Removes HTML tags from a field. + +<>:: +Joins each element of an array into a single string using a separator character between each element. + +<>:: +Parses messages (or specific event fields) containing key-value pairs. + +<> and <>:: +Converts a string field to lowercase or uppercase. + +<>:: +Splits a field into an array of values. + +<>:: +Trims whitespace from a field. + +[discrete] +[[ingest-process-category-data-filtering]] +=== Data filtering processors + +<>:: +Drops the document without raising any errors. + +<>:: +Removes fields from documents. + +[discrete] +[[ingest-process-category-pipeline-handling]] +=== Pipeline handling processors + +<>:: +Raises an exception. Useful for when you expect a pipeline to fail and want to relay a specific message to the requester. + +<>:: +Executes another pipeline. + +<>:: +Reroutes documents to another target index or data stream.
+ +[discrete] +[[ingest-process-category-array-json-handling]] +=== Array/JSON handling processors + +<>:: +Runs an ingest processor on each element of an array or object. + +<>:: +Converts a JSON string into a structured JSON object. + +<>:: +Runs an inline or stored <> on incoming documents. +The script runs in the {painless}/painless-ingest-processor-context.html[painless `ingest` context]. + +<>:: +Sorts the elements of an array in ascending or descending order. [discrete] [[ingest-process-plugins]] -=== Processor plugins +=== Add additional processors You can install additional processors as {plugins}/ingest.html[plugins]. diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index a847d9a306b7c..a68cacec8c10c 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -821,9 +821,6 @@ Now we have filtered based on the top level `"creation_time"` and only one docum Additionally, if you wanted to extract the nearest passage for a matched document, you can supply <> to the `knn` clause. -NOTE: `inner_hits` for kNN will only ever return a single hit, the nearest passage vector. -Setting `"size"` to any value greater than `1` will have no effect on the results. - NOTE: When using `inner_hits` and multiple `knn` clauses, be sure to specify the <> field. Otherwise, a naming clash can occur and fail the search request. @@ -848,7 +845,8 @@ POST passage_vectors/_search "_source": false, "fields": [ "paragraph.text" - ] + ], + "size": 1 } } } @@ -891,7 +889,7 @@ Now the result will contain the nearest found paragraph when searching. "paragraph": { "hits": { "total": { - "value": 1, + "value": 2, "relation": "eq" }, "max_score": 1.0, @@ -935,7 +933,7 @@ Now the result will contain the nearest found paragraph when searching. 
"paragraph": { "hits": { "total": { - "value": 1, + "value": 2, "relation": "eq" }, "max_score": 0.9997144, diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index e5ad75e048c1b..de1f9e6c7a608 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -58,9 +58,14 @@ fix problems that an {es} deployment might encounter. * <> * <> -If none of these solutions relate to your issue, you can still get help: +[discrete] +[[troubleshooting-contact-support]] +=== Contact us + +If none of these guides relate to your issue, or you need further assistance, +then you can contact us as follows: -* For users with an active subscription, you can get help in several ways: +* If you have an active subscription, you have several options: ** Go directly to the http://support.elastic.co[Support Portal] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index f34bb96b3eb81..9880e5e9914a8 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -30,6 +29,7 @@ import 
org.elasticsearch.action.datastreams.lifecycle.ErrorEntry; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.coordination.StableMasterHealthIndicatorService; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; @@ -313,7 +313,7 @@ public void testAutomaticForceMerge() throws Exception { for (int i = 0; i < randomIntBetween(10, 50); i++) { indexDocs(dataStreamName, randomIntBetween(1, 300)); // Make sure the segments get written: - FlushResponse flushResponse = indicesAdmin().flush(new FlushRequest(toBeRolledOverIndex)).actionGet(); + BroadcastResponse flushResponse = indicesAdmin().flush(new FlushRequest(toBeRolledOverIndex)).actionGet(); assertThat(flushResponse.getStatus(), equalTo(RestStatus.OK)); } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 519499addd77e..694e015b602f8 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -31,7 +31,7 @@ import java.io.UncheckedIOException; import java.time.Instant; import java.util.ArrayList; -import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -177,14 +177,18 @@ private List findRoutingPaths(String indexName, Settings allSettings, Li } MappingParserContext parserContext = mapperService.parserContext(); - for (String pathMatch : template.pathMatch()) { + for (Iterator iterator = template.pathMatch().iterator(); iterator.hasNext();) { var mapper = 
parserContext.typeParser(mappingSnippetType) - // Since FieldMapper.parse modifies the Map passed in (removing entries for "type"), that means - // that only the first pathMatch passed in gets recognized as a time_series_dimension. To counteract - // that, we wrap the mappingSnippet in a new HashMap for each pathMatch instance. - .parse(pathMatch, new HashMap<>(mappingSnippet), parserContext) + .parse(iterator.next(), mappingSnippet, parserContext) .build(MapperBuilderContext.root(false, false)); extractPath(routingPaths, mapper); + if (iterator.hasNext()) { + // Since FieldMapper.parse modifies the Map passed in (removing entries for "type"), that means + // that only the first pathMatch passed in gets recognized as a time_series_dimension. + // To avoid this, each parsing call uses a new mapping snippet. + // Note that a shallow copy of the mappingSnippet map is not enough if there are multi-fields. + mappingSnippet = template.mappingForName(templateName, KeywordFieldMapper.CONTENT_TYPE); + } } } return routingPaths; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index eff40cb1dbe62..8b15d6a4b7bdf 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import 
org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; @@ -33,6 +32,7 @@ import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -1168,7 +1168,7 @@ private void forceMergeIndex(ForceMergeRequest forceMergeRequest, ActionListener logger.info("Data stream lifecycle is issuing a request to force merge index [{}]", targetIndex); client.admin().indices().forceMerge(forceMergeRequest, new ActionListener<>() { @Override - public void onResponse(ForceMergeResponse forceMergeResponse) { + public void onResponse(BroadcastResponse forceMergeResponse) { if (forceMergeResponse.getFailedShards() > 0) { DefaultShardOperationFailedException[] failures = forceMergeResponse.getShardFailures(); String message = Strings.format( diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java index 62d07467d5086..db0e3e5cd6258 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProviderTests.java @@ -493,6 +493,55 @@ public void testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntri assertEquals(3, routingPathList.size()); } + public void testGenerateRoutingPathFromDynamicTemplateWithMultiplePathMatchEntriesMultiFields() throws Exception { + Instant now = 
Instant.now().truncatedTo(ChronoUnit.SECONDS); + String mapping = """ + { + "_doc": { + "dynamic_templates": [ + { + "labels": { + "path_match": ["xprometheus.labels.*", "yprometheus.labels.*"], + "mapping": { + "type": "keyword", + "time_series_dimension": true, + "fields": { + "text": { + "type": "text" + } + } + } + } + } + ], + "properties": { + "host": { + "properties": { + "id": { + "type": "keyword", + "time_series_dimension": true + } + } + }, + "another_field": { + "type": "keyword" + } + } + } + } + """; + Settings result = generateTsdbSettings(mapping, now); + assertThat(result.size(), equalTo(3)); + assertThat(IndexSettings.TIME_SERIES_START_TIME.get(result), equalTo(now.minusMillis(DEFAULT_LOOK_BACK_TIME.getMillis()))); + assertThat(IndexSettings.TIME_SERIES_END_TIME.get(result), equalTo(now.plusMillis(DEFAULT_LOOK_AHEAD_TIME.getMillis()))); + assertThat( + IndexMetadata.INDEX_ROUTING_PATH.get(result), + containsInAnyOrder("host.id", "xprometheus.labels.*", "yprometheus.labels.*") + ); + List routingPathList = IndexMetadata.INDEX_ROUTING_PATH.get(result); + assertEquals(3, routingPathList.size()); + } + public void testGenerateRoutingPathFromDynamicTemplate_templateWithNoPathMatch() throws Exception { Instant now = Instant.now().truncatedTo(ChronoUnit.SECONDS); String mapping = """ diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index befa16573de23..15f526d0a06d6 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockRequest; import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition; import org.elasticsearch.action.admin.indices.rollover.RolloverConditions; @@ -27,6 +26,7 @@ import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -578,7 +578,7 @@ public void testForceMerge() throws Exception { // We want this test method to get fake force merge responses, because this is what triggers a cluster state update clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); @@ -748,7 +748,7 @@ public void testForceMergeRetries() throws Exception { clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { listener.onResponse( - new ForceMergeResponse( + new BroadcastResponse( 5, 5, 1, @@ -779,7 +779,7 @@ public void testForceMergeRetries() throws Exception { AtomicInteger forceMergeFailedCount = new AtomicInteger(0); clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 4, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 4, 0, List.of())); forceMergeFailedCount.incrementAndGet(); } }; @@ -800,7 
+800,7 @@ public void testForceMergeRetries() throws Exception { // For the final data stream lifecycle run, we let forcemerge run normally clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; dataStreamLifecycleService.run(clusterService.state()); @@ -900,7 +900,7 @@ public void testForceMergeDedup() throws Exception { setState(clusterService, state); clientDelegate = (action, request, listener) -> { if (action.name().equals("indices:admin/forcemerge")) { - listener.onResponse(new ForceMergeResponse(5, 5, 0, List.of())); + listener.onResponse(new BroadcastResponse(5, 5, 0, List.of())); } }; for (int i = 0; i < 100; i++) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java index c7dbee47ea823..cbb41dfa02c5f 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java @@ -318,7 +318,7 @@ private String mockSearches(String databaseName, int firstChunk, int lastChunk) Map> requestMap = new HashMap<>(); for (int i = firstChunk; i <= lastChunk; i++) { byte[] chunk = data.get(i - firstChunk); - SearchHit hit = new SearchHit(i); + SearchHit hit = SearchHit.unpooled(i); try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) { builder.map(Map.of("data", chunk)); builder.flush(); @@ -328,7 +328,7 @@ private String mockSearches(String databaseName, int firstChunk, int lastChunk) throw new UncheckedIOException(ex); } - SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f); + SearchHits hits = 
SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f); SearchResponse searchResponse = new SearchResponse(hits, null, null, false, null, null, 0, null, 1, 1, 0, 1L, null, null); toRelease.add(searchResponse::decRef); @SuppressWarnings("unchecked") diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index 7fdce03252687..baf3006378054 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -17,13 +17,12 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -178,28 +177,34 @@ public int read() throws IOException { } public void testIndexChunksNoData() throws IOException { - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { 
GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); InputStream empty = new ByteArrayInputStream(new byte[0]); assertEquals(0, geoIpDownloader.indexChunks("test", empty, 0, "d41d8cd98f00b204e9800998ecf8427e", 0)); } public void testIndexChunksMd5Mismatch() { - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + 
flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); IOException exception = expectThrows( IOException.class, @@ -232,14 +237,17 @@ public void testIndexChunks() throws IOException { assertEquals(chunk + 15, source.get("chunk")); listener.onResponse(mock(IndexResponse.class)); }); - client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { - assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(FlushResponse.class)); - }); - client.addHandler(RefreshAction.INSTANCE, (RefreshRequest request, ActionListener flushResponseActionListener) -> { + client.addHandler(FlushAction.INSTANCE, (FlushRequest request, ActionListener flushResponseActionListener) -> { assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); - flushResponseActionListener.onResponse(mock(RefreshResponse.class)); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); }); + client.addHandler( + RefreshAction.INSTANCE, + (RefreshRequest request, ActionListener flushResponseActionListener) -> { + assertArrayEquals(new String[] { GeoIpDownloader.DATABASES_INDEX }, request.indices()); + flushResponseActionListener.onResponse(mock(BroadcastResponse.class)); + } + ); InputStream big = new ByteArrayInputStream(bigArray); assertEquals(17, geoIpDownloader.indexChunks("test", big, 15, "a67563dfa8f3cba8b8cff61eb989a749", 0)); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 4e3d9baaf5c92..138007c104d2b 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -97,7 +97,7 @@ 
public void process(HitContext hit) throws IOException { BytesReference document = percolateQuery.getDocuments().get(slot); leafStoredFields.advanceTo(slot); HitContext subContext = new HitContext( - new SearchHit(slot, "unknown"), + SearchHit.unpooled(slot, "unknown"), percolatorLeafReaderContext, slot, leafStoredFields.storedFields(), diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index b65d966bd6551..82ec63b785e56 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -56,7 +56,7 @@ public void testHitsExecute() throws Exception { LeafReaderContext context = reader.leaves().get(0); // A match: { - HitContext hit = new HitContext(new SearchHit(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); @@ -87,7 +87,7 @@ public void testHitsExecute() throws Exception { // No match: { - HitContext hit = new HitContext(new SearchHit(0), context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value")); MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); @@ -117,7 +117,7 @@ public void testHitsExecute() throws Exception { // No query: { - HitContext hit = new HitContext(new SearchHit(0), 
context, 0, Map.of(), Source.empty(null)); + HitContext hit = new HitContext(SearchHit.unpooled(0), context, 0, Map.of(), Source.empty(null)); PercolateQuery.QueryStore queryStore = ctx -> docId -> null; MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java index d58c15d4efd74..f57c02bcdcc22 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RatedSearchHit.java @@ -33,12 +33,12 @@ public class RatedSearchHit implements Writeable, ToXContentObject { private final OptionalInt rating; public RatedSearchHit(SearchHit searchHit, OptionalInt rating) { - this.searchHit = searchHit; + this.searchHit = searchHit.asUnpooled(); this.rating = rating; } RatedSearchHit(StreamInput in) throws IOException { - this(SearchHit.readFrom(in), in.readBoolean() ? OptionalInt.of(in.readVInt()) : OptionalInt.empty()); + this(SearchHit.readFrom(in, false), in.readBoolean() ? 
OptionalInt.of(in.readVInt()) : OptionalInt.empty()); } @Override diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java index 7dad062ab3bca..37de70ded462f 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java @@ -209,7 +209,7 @@ public void testDeleteByQuery() throws Exception { .addSort(SORTING_FIELD, SortOrder.DESC), response -> { // Modify a subset of the target documents concurrently - final List originalDocs = Arrays.asList(response.getHits().getHits()); + final List originalDocs = Arrays.asList(response.getHits().asUnpooled().getHits()); docsModifiedConcurrently.addAll(randomSubsetOf(finalConflictingOps, originalDocs)); } ); diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index fcea4618f4cd4..609702a58bf84 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; 
@@ -24,6 +23,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -554,9 +554,9 @@ void refreshAndFinish(List indexingFailures, List search RefreshRequest refresh = new RefreshRequest(); refresh.indices(destinationIndices.toArray(new String[destinationIndices.size()])); logger.debug("[{}]: refreshing", task.getId()); - bulkClient.admin().indices().refresh(refresh, new ActionListener() { + bulkClient.admin().indices().refresh(refresh, new ActionListener<>() { @Override - public void onResponse(RefreshResponse response) { + public void onResponse(BroadcastResponse response) { finishHim(null, indexingFailures, searchFailures, timedOut); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java index c3cf7cf62f925..c40a4f72bc133 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java @@ -567,8 +567,8 @@ protected RequestWrapper buildRequest(Hit doc) { action.start(); // create a simulated response. 
- SearchHit hit = new SearchHit(0, "id").sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits( + SearchHit hit = SearchHit.unpooled(0, "id").sourceRef(new BytesArray("{}")); + SearchHits hits = SearchHits.unpooled( IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java index 7ac50eb0e7c6c..44e69d3a4cda8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java @@ -162,8 +162,8 @@ public void testScrollKeepAlive() { private SearchResponse createSearchResponse() { // create a simulated response. - SearchHit hit = new SearchHit(0, "id").sourceRef(new BytesArray("{}")); - SearchHits hits = new SearchHits( + SearchHit hit = SearchHit.unpooled(0, "id").sourceRef(new BytesArray("{}")); + SearchHits hits = SearchHits.unpooled( IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index f5c1912d15251..e916b02e62b8e 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; @@ -41,6 +42,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Predicate; import java.util.regex.Pattern; @@ -196,12 +198,21 @@ private static class AzureHTTPStatsCollectorHandler extends HttpStatsCollectorHa private static final Predicate LIST_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+\\?.+").asMatchPredicate(); private static final Predicate GET_BLOB_PATTERN = Pattern.compile("GET /[a-zA-Z0-9]+/[a-zA-Z0-9]+/.+").asMatchPredicate(); + private final Set seenRequestIds = ConcurrentCollections.newConcurrentSet(); + private AzureHTTPStatsCollectorHandler(HttpHandler delegate) { super(delegate); } @Override protected void maybeTrack(String request, Headers headers) { + // Same request id is a retry + // https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-ncnbi/817da997-30d2-4cd3-972f-a0073e4e98f7 + // Do not count retries since the client side request stats do not track them yet. 
+ // See https://github.com/elastic/elasticsearch/issues/104443 + if (false == seenRequestIds.add(headers.getFirst("X-ms-client-request-id"))) { + return; + } if (GET_BLOB_PATTERN.test(request)) { trackRequest("GetBlob"); } else if (Regex.simpleMatch("HEAD /*/*/*", request)) { diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index e70151cbdf8ee..9ad2c57b7f585 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -15,8 +15,8 @@ import com.sun.net.httpserver.HttpHandler; import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.BlobContainer; @@ -191,7 +191,7 @@ public void testAbortRequestStats() throws Exception { waitForDocs(nbDocs, indexer); } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); @@ -234,7 +234,7 @@ public void testMetrics() throws Exception { waitForDocs(nbDocs, indexer); } flushAndRefresh(index); - ForceMergeResponse forceMerge = 
client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index ba762537537e3..83668cc271922 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -25,6 +25,7 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.Repository; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; @@ -84,13 +85,13 @@ protected S3Repository createRepository( @Override public Collection createComponents(PluginServices services) { - service.set(s3Service(services.environment(), services.clusterService().getSettings())); + service.set(s3Service(services.environment(), services.clusterService().getSettings(), services.resourceWatcherService())); this.service.get().refreshAndClearCache(S3ClientSettings.load(settings)); return List.of(service); } - S3Service s3Service(Environment environment, Settings nodeSettings) { - return new S3Service(environment, nodeSettings); + S3Service s3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + return new S3Service(environment, nodeSettings, resourceWatcherService); } @Override diff --git 
a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index 1fd31047c735a..fc58482651fa3 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -28,6 +28,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.coordination.stateless.StoreHeartbeatService; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.Strings; @@ -37,6 +38,9 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; +import org.elasticsearch.watcher.FileChangesListener; +import org.elasticsearch.watcher.FileWatcher; +import org.elasticsearch.watcher.ResourceWatcherService; import java.io.Closeable; import java.io.IOException; @@ -68,7 +72,6 @@ class S3Service implements Closeable { TimeValue.timeValueHours(24), Setting.Property.NodeScope ); - private volatile Map clientsCache = emptyMap(); /** @@ -90,12 +93,13 @@ class S3Service implements Closeable { final TimeValue compareAndExchangeTimeToLive; final TimeValue compareAndExchangeAntiContentionDelay; - S3Service(Environment environment, Settings nodeSettings) { + S3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { webIdentityTokenCredentialsProvider = new CustomWebIdentityTokenCredentialsProvider( environment, System::getenv, System::getProperty, - Clock.systemUTC() + Clock.systemUTC(), + resourceWatcherService ); compareAndExchangeTimeToLive = REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); @@ -333,7 +337,8 @@ 
static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials Environment environment, SystemEnvironment systemEnvironment, JvmEnvironment jvmEnvironment, - Clock clock + Clock clock, + ResourceWatcherService resourceWatcherService ) { // Check whether the original environment variable exists. If it doesn't, // the system doesn't support AWS web identity tokens @@ -395,6 +400,31 @@ static class CustomWebIdentityTokenCredentialsProvider implements AWSCredentials roleSessionName, webIdentityTokenFileSymlink.toString() ).withStsClient(stsClient).build(); + var watcher = new FileWatcher(webIdentityTokenFileSymlink); + watcher.addListener(new FileChangesListener() { + + @Override + public void onFileCreated(Path file) { + onFileChanged(file); + } + + @Override + public void onFileChanged(Path file) { + if (file.equals(webIdentityTokenFileSymlink)) { + LOGGER.debug("WS web identity token file [{}] changed, updating credentials", file); + credentialsProvider.refresh(); + } + } + }); + try { + resourceWatcherService.add(watcher, ResourceWatcherService.Frequency.LOW); + } catch (IOException e) { + throw new ElasticsearchException( + "failed to start watching AWS web identity token file [{}]", + e, + webIdentityTokenFileSymlink + ); + } } catch (Exception e) { stsClient.shutdown(); throw e; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java index cecb0cd147897..fb775ab31c04d 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/CustomWebIdentityTokenCredentialsProviderTests.java @@ -9,16 +9,21 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.auth.AWSCredentials; +import 
com.amazonaws.auth.AWSCredentialsProvider; import com.sun.net.httpserver.HttpServer; import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.mocksocket.MockHttpServer; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.junit.After; import org.junit.Assert; import org.mockito.Mockito; @@ -36,12 +41,23 @@ import java.time.format.DateTimeFormatter; import java.util.Arrays; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.function.Consumer; import java.util.stream.Collectors; public class CustomWebIdentityTokenCredentialsProviderTests extends ESTestCase { private static final String ROLE_ARN = "arn:aws:iam::123456789012:role/FederatedWebIdentityRole"; private static final String ROLE_NAME = "aws-sdk-java-1651084775908"; + private final TestThreadPool threadPool = new TestThreadPool("test"); + private final Settings settings = Settings.builder().put("resource.reload.interval.low", TimeValue.timeValueMillis(100)).build(); + private final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool); + + @After + public void shutdown() throws Exception { + resourceWatcherService.close(); + threadPool.shutdown(); + } private static Environment getEnvironment() throws IOException { Path configDirectory = createTempDir("web-identity-token-test"); @@ -53,7 +69,7 @@ private static Environment getEnvironment() throws IOException { } @SuppressForbidden(reason = "HTTP server is used for testing") - public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { + private static HttpServer getHttpServer(Consumer 
webIdentityTokenCheck) throws IOException { HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0); httpServer.createContext("/", exchange -> { try (exchange) { @@ -62,6 +78,7 @@ public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { .map(e -> e.split("=")) .collect(Collectors.toMap(e -> e[0], e -> URLDecoder.decode(e[1], StandardCharsets.UTF_8))); assertEquals(ROLE_NAME, params.get("RoleSessionName")); + webIdentityTokenCheck.accept(params.get("WebIdentityToken")); exchange.getResponseHeaders().add("Content-Type", "text/xml; charset=UTF-8"); byte[] response = Strings.format( @@ -97,25 +114,41 @@ public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { } }); httpServer.start(); + return httpServer; + } - Environment environment = getEnvironment(); - - // No region is set, but the SDK shouldn't fail because of that - Map environmentVariables = Map.of( - "AWS_WEB_IDENTITY_TOKEN_FILE", - "/var/run/secrets/eks.amazonaws.com/serviceaccount/token", - "AWS_ROLE_ARN", - ROLE_ARN - ); - Map systemProperties = Map.of( + @SuppressForbidden(reason = "HTTP server is used for testing") + private static Map getSystemProperties(HttpServer httpServer) { + return Map.of( "com.amazonaws.sdk.stsMetadataServiceEndpointOverride", "http://" + httpServer.getAddress().getHostName() + ":" + httpServer.getAddress().getPort() ); + } + + private static Map environmentVariables() { + return Map.of("AWS_WEB_IDENTITY_TOKEN_FILE", "/var/run/secrets/eks.amazonaws.com/serviceaccount/token", "AWS_ROLE_ARN", ROLE_ARN); + } + + private static void assertCredentials(AWSCredentials credentials) { + Assert.assertFalse(credentials.getAWSAccessKeyId().isEmpty()); + Assert.assertFalse(credentials.getAWSSecretKey().isEmpty()); + } + + @SuppressForbidden(reason = "HTTP server is used for testing") + public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { + HttpServer 
httpServer = getHttpServer(s -> assertEquals("YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl", s)); + + Environment environment = getEnvironment(); + + // No region is set, but the SDK shouldn't fail because of that + Map environmentVariables = environmentVariables(); + Map systemProperties = getSystemProperties(httpServer); var webIdentityTokenCredentialsProvider = new S3Service.CustomWebIdentityTokenCredentialsProvider( environment, environmentVariables::get, systemProperties::getOrDefault, - Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC) + Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC), + resourceWatcherService ); try { AWSCredentials credentials = S3Service.buildCredentials( @@ -124,8 +157,64 @@ public void testCreateWebIdentityTokenCredentialsProvider() throws Exception { webIdentityTokenCredentialsProvider ).getCredentials(); - Assert.assertEquals("sts_access_key", credentials.getAWSAccessKeyId()); - Assert.assertEquals("secret_access_key", credentials.getAWSSecretKey()); + assertCredentials(credentials); + } finally { + webIdentityTokenCredentialsProvider.shutdown(); + httpServer.stop(0); + } + } + + private static class DelegatingConsumer implements Consumer { + private Consumer delegate; + + private DelegatingConsumer(Consumer delegate) { + this.delegate = delegate; + } + + private void setDelegate(Consumer delegate) { + this.delegate = delegate; + } + + @Override + public void accept(String s) { + delegate.accept(s); + } + } + + @SuppressForbidden(reason = "HTTP server is used for testing") + public void testPickUpNewWebIdentityTokenWhenItsChanged() throws Exception { + DelegatingConsumer webIdentityTokenCheck = new DelegatingConsumer(s -> assertEquals("YXdzLXdlYi1pZGVudGl0eS10b2tlbi1maWxl", s)); + + HttpServer httpServer = getHttpServer(webIdentityTokenCheck); + Environment environment = getEnvironment(); + Map environmentVariables = environmentVariables(); + Map systemProperties = getSystemProperties(httpServer); + var 
webIdentityTokenCredentialsProvider = new S3Service.CustomWebIdentityTokenCredentialsProvider( + environment, + environmentVariables::get, + systemProperties::getOrDefault, + Clock.fixed(Instant.ofEpochMilli(1651084775908L), ZoneOffset.UTC), + resourceWatcherService + ); + try { + AWSCredentialsProvider awsCredentialsProvider = S3Service.buildCredentials( + LogManager.getLogger(S3Service.class), + S3ClientSettings.getClientSettings(Settings.EMPTY, randomAlphaOfLength(8)), + webIdentityTokenCredentialsProvider + ); + assertCredentials(awsCredentialsProvider.getCredentials()); + + var latch = new CountDownLatch(1); + String newWebIdentityToken = "88f84342080d4671a511e10ae905b2b0"; + webIdentityTokenCheck.setDelegate(s -> { + if (s.equals(newWebIdentityToken)) { + latch.countDown(); + } + }); + Files.writeString(environment.configFile().resolve("repository-s3/aws-web-identity-token-file"), newWebIdentityToken); + + safeAwait(latch); + assertCredentials(awsCredentialsProvider.getCredentials()); } finally { webIdentityTokenCredentialsProvider.shutdown(); httpServer.stop(0); @@ -149,7 +238,8 @@ public void testSupportRegionalizedEndpoints() throws Exception { getEnvironment(), environmentVariables::get, systemProperties::getOrDefault, - Clock.systemUTC() + Clock.systemUTC(), + resourceWatcherService ); // We can't verify that webIdentityTokenCredentialsProvider's STS client uses the "https://sts.us-west-2.amazonaws.com" // endpoint in a unit test. 
The client depends on hardcoded RegionalEndpointsOptionResolver that in turn depends diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 085d438618a19..28a48c2968f59 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestGetRepositoriesAction; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.security.AccessController; @@ -274,8 +275,8 @@ protected void assertSnapshotOrGenericThread() { } @Override - S3Service s3Service(Environment environment, Settings nodeSettings) { - return new ProxyS3Service(environment, nodeSettings); + S3Service s3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + return new ProxyS3Service(environment, nodeSettings, resourceWatcherService); } public static final class ClientAndCredentials extends AmazonS3Wrapper { @@ -291,8 +292,8 @@ public static final class ProxyS3Service extends S3Service { private static final Logger logger = LogManager.getLogger(ProxyS3Service.class); - ProxyS3Service(Environment environment, Settings nodeSettings) { - super(environment, nodeSettings); + ProxyS3Service(Environment environment, Settings nodeSettings, ResourceWatcherService resourceWatcherService) { + super(environment, nodeSettings, resourceWatcherService); } @Override diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java 
b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 34e14dc718818..58c079515aa47 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; +import org.elasticsearch.watcher.ResourceWatcherService; import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; @@ -92,7 +93,7 @@ public class S3BlobContainerRetriesTests extends AbstractBlobContainerRetriesTes @Before public void setUp() throws Exception { shouldErrorOnDns = new AtomicBoolean(false); - service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY) { + service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY, Mockito.mock(ResourceWatcherService.class)) { @Override protected AmazonS3ClientBuilder buildClientBuilder(S3ClientSettings clientSettings) { final AmazonS3ClientBuilder builder = super.buildClientBuilder(clientSettings); @@ -509,6 +510,7 @@ public void testWriteLargeBlobStreaming() throws Exception { assertEquals(blobSize, bytesReceived.get()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104436") public void testReadRetriesAfterMeaningfulProgress() throws Exception { final int maxRetries = between(0, 5); final int bufferSizeBytes = scaledRandomIntBetween( diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java index c48e0dc337d30..31bfd3a5e157f 100644 --- 
a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ClientSettingsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.mockito.Mockito; import java.io.IOException; @@ -178,7 +179,7 @@ public void testRegionCanBeSet() throws IOException { ); assertThat(settings.get("default").region, is("")); assertThat(settings.get("other").region, is(region)); - try (S3Service s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY)) { + try (var s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY, Mockito.mock(ResourceWatcherService.class))) { AmazonS3Client other = (AmazonS3Client) s3Service.buildClient(settings.get("other")); assertThat(other.getSignerRegionOverride(), is(region)); } diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index ab5edc4608bfd..0a92ed0a28973 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.hamcrest.Matchers; import org.mockito.Mockito; @@ -45,8 +46,8 @@ public void shutdown() { private static class DummyS3Service extends S3Service { - DummyS3Service(Environment environment) { - super(environment, 
Settings.EMPTY); + DummyS3Service(Environment environment, ResourceWatcherService resourceWatcherService) { + super(environment, Settings.EMPTY, resourceWatcherService); } @Override @@ -125,7 +126,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { return new S3Repository( metadata, NamedXContentRegistry.EMPTY, - new DummyS3Service(Mockito.mock(Environment.class)), + new DummyS3Service(Mockito.mock(Environment.class), Mockito.mock(ResourceWatcherService.class)), BlobStoreTestUtil.mockClusterService(), MockBigArrays.NON_RECYCLING_INSTANCE, new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java index bbdeea6d87631..33e56bcf2180b 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3ServiceTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.watcher.ResourceWatcherService; import org.mockito.Mockito; import java.io.IOException; @@ -18,7 +19,11 @@ public class S3ServiceTests extends ESTestCase { public void testCachedClientsAreReleased() throws IOException { - final S3Service s3Service = new S3Service(Mockito.mock(Environment.class), Settings.EMPTY); + final S3Service s3Service = new S3Service( + Mockito.mock(Environment.class), + Settings.EMPTY, + Mockito.mock(ResourceWatcherService.class) + ); final Settings settings = Settings.builder().put("endpoint", "http://first").build(); final RepositoryMetadata metadata1 = new RepositoryMetadata("first", "s3", settings); final RepositoryMetadata metadata2 = new RepositoryMetadata("second", "s3", 
settings); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java index 256a5516a2ef2..b2475216a9ce7 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Utils.java @@ -8,6 +8,9 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.SDKGlobalConfiguration; +import com.amazonaws.util.StringUtils; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; @@ -24,7 +27,11 @@ class AwsEc2Utils { private static final Logger logger = LogManager.getLogger(AwsEc2Utils.class); - private static final int CONNECT_TIMEOUT = 2000; + // The timeout can be configured via the AWS_METADATA_SERVICE_TIMEOUT environment variable + private static final int TIMEOUT = Optional.ofNullable(System.getenv(SDKGlobalConfiguration.AWS_METADATA_SERVICE_TIMEOUT_ENV_VAR)) + .filter(StringUtils::hasValue) + .map(s -> Integer.parseInt(s) * 1000) + .orElse(2000); private static final int METADATA_TOKEN_TTL_SECONDS = 10; static final String X_AWS_EC_2_METADATA_TOKEN = "X-aws-ec2-metadata-token"; @@ -39,7 +46,10 @@ static Optional getMetadataToken(String metadataTokenUrl) { try { urlConnection = (HttpURLConnection) new URL(metadataTokenUrl).openConnection(); urlConnection.setRequestMethod("PUT"); - urlConnection.setConnectTimeout(CONNECT_TIMEOUT); + // Use both timeout for connect and read timeout analogous to AWS SDK. 
+ // See com.amazonaws.internal.HttpURLConnection#connectToEndpoint + urlConnection.setConnectTimeout(TIMEOUT); + urlConnection.setReadTimeout(TIMEOUT); urlConnection.setRequestProperty("X-aws-ec2-metadata-token-ttl-seconds", String.valueOf(METADATA_TOKEN_TTL_SECONDS)); } catch (IOException e) { logger.warn("Unable to access the IMDSv2 URI: " + metadataTokenUrl, e); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index b9bea564e2720..41b848954b551 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -121,6 +121,19 @@ public void testTokenMetadataApiIsMisbehaving() throws Exception { } } + public void testTokenMetadataApiDoesNotRespond() throws Exception { + try (var metadataServer = new MetadataServer("/metadata", exchange -> { + assertNull(exchange.getRequestHeaders().getFirst("X-aws-ec2-metadata-token")); + exchange.sendResponseHeaders(200, 0); + exchange.getResponseBody().write("us-east-1c".getBytes(StandardCharsets.UTF_8)); + exchange.close(); + }, "/latest/api/token", ex -> { + // Intentionally don't close the connection, so the client has to time out + })) { + assertNodeAttributes(Settings.EMPTY, metadataServer.metadataUri(), metadataServer.tokenUri(), "us-east-1c"); + } + } + public void testTokenMetadataApiIsNotAvailable() throws Exception { try (var metadataServer = metadataServerWithoutToken()) { assertNodeAttributes(Settings.EMPTY, metadataServer.metadataUri(), metadataServer.tokenUri(), "us-east-1c"); diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index 
e709b838a26f3..d91f7cf3e9a8d 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; @@ -313,6 +314,14 @@ protected ClientYamlTestExecutionContext createRestTestExecutionContext( public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId) && searchTestFeatureService.clusterHasFeature(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return Sets.intersection( + testFeatureService.getAllSupportedFeatures(), + searchTestFeatureService.getAllSupportedFeatures() + ); + } }; final Set combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); final Set combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index a331d6f54cb4a..ce11112bd4416 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import 
org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.IOUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; @@ -298,6 +299,14 @@ protected ClientYamlTestExecutionContext createRestTestExecutionContext( public boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId) && searchTestFeatureService.clusterHasFeature(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return Sets.intersection( + testFeatureService.getAllSupportedFeatures(), + searchTestFeatureService.getAllSupportedFeatures() + ); + } }; final Set combinedOsSet = Stream.concat(osSet.stream(), Stream.of(searchOs)).collect(Collectors.toSet()); final Set combinedNodeVersions = Stream.concat(nodesVersions.stream(), searchNodeVersions.stream()) diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 16209a73826ca..99b40b0f5c101 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -605,7 +605,7 @@ public void testShrinkAfterUpgrade() throws IOException { *
  • Make sure the document count is correct * */ - public void testRollover() throws IOException { + public void testRollover() throws Exception { if (isRunningAgainstOldCluster()) { client().performRequest( newXContentRequest( @@ -637,9 +637,12 @@ public void testRollover() throws IOException { ) ); - assertThat( - EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v")).getEntity()), - containsString("testrollover-000002") + // assertBusy to work around https://github.com/elastic/elasticsearch/issues/104371 + assertBusy( + () -> assertThat( + EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v&error_trace")).getEntity()), + containsString("testrollover-000002") + ) ); } diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 9c5415f1d5ea9..ca9528005758a 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -17,8 +17,8 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.util.TimeUnits; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.aggregations.pipeline.DerivativePipelineAggregationBuilder; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -199,7 +199,7 @@ private void indexDocuments(String idPrefix) throws IOException, InterruptedExce assertTrue(latch.await(30, TimeUnit.SECONDS)); - RefreshResponse refreshResponse = refresh(INDEX_NAME); + BroadcastResponse refreshResponse = refresh(INDEX_NAME); ElasticsearchAssertions.assertNoFailures(refreshResponse); } diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 2d8ff8b747323..0487b282179a9 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -13,7 +13,7 @@ import org.elasticsearch.client.Request; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.features.FeatureService; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; import java.util.List; @@ -26,11 +26,11 @@ public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { - @BeforeClass - public static void checkMigrationVersion() { - assumeTrue( + @Before + public void checkMigrationVersion() { + assumeFalse( "This checks migrations from before cluster features were introduced", - getOldClusterVersion().before(FeatureService.CLUSTER_FEATURES_ADDED_VERSION) + oldClusterHasFeature(FeatureService.FEATURES_SUPPORTED) ); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 230ab39610b1e..1e9d3d41e6d24 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -16,11 +16,11 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.DesiredNode; import org.elasticsearch.cluster.metadata.DesiredNodeWithStatus; -import org.elasticsearch.cluster.metadata.MetadataFeatures; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; @@ -48,13 +48,11 @@ private enum ProcessorsPrecision { } public void testUpgradeDesiredNodes() throws Exception { - assumeTrue("Desired nodes was introduced in 8.1", getOldClusterVersion().onOrAfter(Version.V_8_1_0)); + assumeTrue("Desired nodes was introduced in 8.1", oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_NODE_API_SUPPORTED)); - var featureVersions = new MetadataFeatures().getHistoricalFeatures(); - - if (getOldClusterVersion().onOrAfter(featureVersions.get(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED))) { + if (oldClusterHasFeature(DesiredNode.DOUBLE_PROCESSORS_SUPPORTED)) { assertUpgradedNodesCanReadDesiredNodes(); - } else if (getOldClusterVersion().onOrAfter(featureVersions.get(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED))) { + } else if (oldClusterHasFeature(DesiredNode.RANGE_FLOAT_PROCESSORS_SUPPORTED)) { assertDesiredNodesUpdatedWithRoundedUpFloatsAreIdempotent(); } else { assertDesiredNodesWithFloatProcessorsAreRejectedInOlderVersions(); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index d5b5e24e2ccde..273196f392064 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.ListMatcher; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import 
org.elasticsearch.xcontent.json.JsonXContent; @@ -229,7 +230,7 @@ private void bulk(String index, String valueSuffix, int count) throws IOExceptio } public void testTsdb() throws IOException { - assumeTrue("indexing time series indices changed in 8.2.0", getOldClusterVersion().onOrAfter(Version.V_8_2_0)); + assumeTrue("indexing time series indices changed in 8.2.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_NEW_INDEX_FORMAT)); StringBuilder bulk = new StringBuilder(); if (isOldCluster()) { @@ -337,7 +338,7 @@ private void assertTsdbAgg(Matcher... expected) throws IOException { } public void testSyntheticSource() throws IOException { - assumeTrue("added in 8.4.0", getOldClusterVersion().onOrAfter(Version.V_8_4_0)); + assumeTrue("added in 8.4.0", oldClusterHasFeature(RestTestLegacyFeatures.SYNTHETIC_SOURCE_SUPPORTED)); if (isOldCluster()) { Request createIndex = new Request("PUT", "/synthetic"); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 5a2c4c783ec85..43bc8eacac98c 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; @@ -69,6 +70,7 @@ public static Iterable parameters() { } private static final Set upgradedNodes = new HashSet<>(); + private static final Set oldClusterFeatures = new HashSet<>(); private static boolean 
upgradeFailed = false; private static IndexVersion oldIndexVersion; @@ -78,6 +80,13 @@ protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgrade this.requestedUpgradedNodes = upgradedNodes; } + @Before + public void extractOldClusterFeatures() { + if (isOldCluster() && oldClusterFeatures.isEmpty()) { + oldClusterFeatures.addAll(testFeatureService.getAllSupportedFeatures()); + } + } + @Before public void extractOldIndexVersion() throws Exception { if (oldIndexVersion == null && upgradedNodes.isEmpty()) { @@ -138,13 +147,24 @@ public void upgradeNode() throws Exception { public static void resetNodes() { oldIndexVersion = null; upgradedNodes.clear(); + oldClusterFeatures.clear(); upgradeFailed = false; } + @Deprecated // Use the new testing framework and oldClusterHasFeature(feature) instead protected static org.elasticsearch.Version getOldClusterVersion() { return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION); } + protected static boolean oldClusterHasFeature(String featureId) { + assert oldClusterFeatures.isEmpty() == false; + return oldClusterFeatures.contains(featureId); + } + + protected static boolean oldClusterHasFeature(NodeFeature feature) { + return oldClusterHasFeature(feature.id()); + } + protected static IndexVersion getOldClusterIndexVersion() { assert oldIndexVersion != null; return oldIndexVersion; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index 4b765849e6ea9..ef80643c82c0d 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -13,7 +13,6 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.util.EntityUtils; -import 
org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -24,6 +23,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -49,13 +49,10 @@ public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { } public void testSnapshotBasedRecovery() throws Exception { - - assumeFalse( - "Cancel shard allocation command is broken for initial desired balance versions and might allocate shard " - + "on the node where it is not supposed to be. Fixed by https://github.com/elastic/elasticsearch/pull/93635", - getOldClusterVersion() == Version.V_8_6_0 - || getOldClusterVersion() == Version.V_8_6_1 - || getOldClusterVersion() == Version.V_8_7_0 + assumeTrue( + "Cancel shard allocation command is broken for initial versions of the desired_balance allocator", + oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_SUPPORTED) == false + || oldClusterHasFeature(RestTestLegacyFeatures.DESIRED_BALANCED_ALLOCATOR_FIXED) ); final String indexName = "snapshot_based_recovery"; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index b42646164b335..3ce0fc79087c2 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -10,11 +10,11 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.common.time.DateFormatter; import 
org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.time.Instant; @@ -130,10 +130,7 @@ public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { """; public void testTsdbDataStream() throws Exception { - assumeTrue( - "Skipping version [" + getOldClusterVersion() + "], because TSDB was GA-ed in 8.7.0", - getOldClusterVersion().onOrAfter(Version.V_8_7_0) - ); + assumeTrue("TSDB was GA-ed in 8.7.0", oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE)); String dataStreamName = "k8s"; if (isOldCluster()) { final String INDEX_TEMPLATE = """ @@ -159,8 +156,9 @@ public void testTsdbDataStream() throws Exception { public void testTsdbDataStreamWithComponentTemplate() throws Exception { assumeTrue( - "Skipping version [" + getOldClusterVersion() + "], because TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", - getOldClusterVersion().onOrAfter(Version.V_8_7_0) && getOldClusterVersion().before(Version.V_8_11_0) + "TSDB was GA-ed in 8.7.0 and bug was fixed in 8.11.0", + oldClusterHasFeature(RestTestLegacyFeatures.TSDB_GENERALLY_AVAILABLE) + && (oldClusterHasFeature(RestTestLegacyFeatures.TSDB_EMPTY_TEMPLATE_FIXED) == false) ); String dataStreamName = "template-with-component-template"; if (isOldCluster()) { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 9647bfb739164..3af344051030b 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -10,13 +10,13 @@ import com.carrotsearch.randomizedtesting.annotations.Name; -import org.elasticsearch.Version; import 
org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Strings; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.util.Map; @@ -42,10 +42,7 @@ public void testOldIndexSettings() throws Exception { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME); createTestIndex.setJsonEntity("{\"settings\": {\"index.indexing.slowlog.level\": \"WARN\"}}"); createTestIndex.setOptions(expectWarnings(EXPECTED_WARNING)); - if (getOldClusterVersion().before(Version.V_8_0_0)) { - // create index with settings no longer valid in 8.0 - client().performRequest(createTestIndex); - } else { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED)) { assertTrue( expectThrows(ResponseException.class, () -> client().performRequest(createTestIndex)).getMessage() .contains("unknown setting [index.indexing.slowlog.level]") @@ -53,12 +50,15 @@ public void testOldIndexSettings() throws Exception { Request createTestIndex1 = new Request("PUT", "/" + INDEX_NAME); client().performRequest(createTestIndex1); + } else { + // create index with settings no longer valid in 8.0 + client().performRequest(createTestIndex); } // add some data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { bulk.setOptions(expectWarnings(EXPECTED_WARNING)); } bulk.setJsonEntity(Strings.format(""" @@ -70,7 +70,7 @@ public void testOldIndexSettings() throws Exception { // add some more data Request bulk = new Request("POST", "/_bulk"); bulk.addParameter("refresh", "true"); - if (getOldClusterVersion().before(Version.V_8_0_0)) { + 
if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { bulk.setOptions(expectWarnings(EXPECTED_WARNING)); } bulk.setJsonEntity(Strings.format(""" @@ -79,7 +79,7 @@ public void testOldIndexSettings() throws Exception { """, INDEX_NAME)); client().performRequest(bulk); } else { - if (getOldClusterVersion().before(Version.V_8_0_0)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED) == false) { Request createTestIndex = new Request("PUT", "/" + INDEX_NAME + "/_settings"); // update index settings should work createTestIndex.setJsonEntity("{\"index.indexing.slowlog.level\": \"INFO\"}"); @@ -117,7 +117,7 @@ private void assertCount(String index, int countAtLeast) throws IOException { public static void updateIndexSettingsPermittingSlowlogDeprecationWarning(String index, Settings.Builder settings) throws IOException { Request request = new Request("PUT", "/" + index + "/_settings"); request.setJsonEntity(org.elasticsearch.common.Strings.toString(settings.build())); - if (getOldClusterVersion().before(Version.V_7_17_9)) { + if (oldClusterHasFeature(RestTestLegacyFeatures.DEPRECATION_WARNINGS_LEAK_FIXED) == false) { // There is a bug (fixed in 7.17.9 and 8.7.0 where deprecation warnings could leak into ClusterApplierService#applyChanges) // Below warnings are set (and leaking) from an index in this test case request.setOptions(expectVersionSpecificWarnings(v -> { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json index a2bcf67e8611c..85a2a46c8335d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.async_query.json @@ -29,6 +29,11 @@ "type":"string", "description":"The character to use between values within a CSV row. 
Only valid for the csv format.", "default":false + }, + "drop_null_columns": { + "type": "boolean", + "description": "Should entirely null columns be removed from the results? Their name and type will be returning in a new `all_columns` section.", + "default": false } }, "body":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json index 8810746851468..573fde5d9a9cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/esql.query.json @@ -29,6 +29,11 @@ "type":"string", "description":"The character to use between values within a CSV row. Only valid for the csv format.", "default":false + }, + "drop_null_columns": { + "type": "boolean", + "description": "Should entirely null columns be removed from the results? Their name and type will be returning in a new `all_columns` section.", + "default": false } }, "body":{ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml index f44461e7b8143..c69e22d274c8e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/100_knn_nested_search.yml @@ -6,6 +6,9 @@ setup: indices.create: index: test body: + settings: + index: + number_of_shards: 2 mappings: properties: name: @@ -135,6 +138,172 @@ setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} - match: {hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0"} --- +"nested kNN search inner_hits size > 1": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + + - do: + index: + index: test + id: "4" + body: + name: moose.jpg + nested: + - 
paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "5" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "6" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + - do: + indices.refresh: { } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 3} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + + - match: { hits.hits.0.fields.name.0: "moose.jpg" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 5 + num_candidates: 5 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 5} + # All these initial matches are "moose.jpg", which has 3 nested vectors, but two are closest + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + 
- match: {hits.hits.1.fields.name.0: "moose.jpg"} + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.1.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.2.fields.name.0: "moose.jpg"} + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.3.fields.name.0: "moose.jpg"} + - length: { hits.hits.3.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.3.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.3.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + # Rabbit only has one passage vector + - match: {hits.hits.4.fields.name.0: "rabbit.jpg"} + - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + k: 3 + num_candidates: 3 + filter: {term: {name: "cow.jpg"}} + inner_hits: {size: 3, fields: ["nested.paragraph_id"], _source: false} + + - match: {hits.total.value: 1} + - match: { hits.hits.0._id: "1" } + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "1" } +--- +"nested kNN search inner_hits & boosting": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + features: close_to + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + inner_hits: {size: 2, fields: 
["nested.paragraph_id"], _source: false} + + - close_to: { hits.hits.0._score: {value: 0.00909090, error: 0.00001} } + - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: {value: 0.00909090, error: 0.00001} } + - close_to: { hits.hits.1._score: {value: 0.0021519717, error: 0.00001} } + - close_to: { hits.hits.1.inner_hits.nested.hits.hits.0._score: {value: 0.0021519717, error: 0.00001} } + - close_to: { hits.hits.2._score: {value: 0.00001, error: 0.00001} } + - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00001, error: 0.00001} } + + - do: + search: + index: test + body: + fields: [ "name" ] + knn: + field: nested.vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 3 + num_candidates: 5 + boost: 2 + inner_hits: {size: 2, fields: ["nested.paragraph_id"], _source: false} + - close_to: { hits.hits.0._score: {value: 0.0181818, error: 0.00001} } + - close_to: { hits.hits.0.inner_hits.nested.hits.hits.0._score: {value: 0.0181818, error: 0.00001} } + - close_to: { hits.hits.1._score: {value: 0.0043039434, error: 0.00001} } + - close_to: { hits.hits.1.inner_hits.nested.hits.hits.0._score: {value: 0.0043039434, error: 0.00001} } + - close_to: { hits.hits.2._score: {value: 0.00002, error: 0.00001} } + - close_to: { hits.hits.2.inner_hits.nested.hits.hits.0._score: {value: 0.00002, error: 0.00001} } +--- "nested kNN search inner_hits & profiling": - skip: version: ' - 8.12.99' @@ -144,7 +313,6 @@ setup: index: test body: profile: true - _source: false fields: [ "name" ] knn: field: nested.vector diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml index 435291b454d08..5d07c0c8b5f9d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/130_knn_query_nested_search.yml @@ -186,7 +186,6 @@ setup: - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } --- - "nested kNN search post-filtered on nested fields DOES NOT work": - do: search: @@ -211,3 +210,112 @@ setup: # TODO: fix it on Lucene level so nested knn respects num_candidates # or do pre-filtering - match: {hits.total.value: 0} +--- +"nested kNN search inner_hits size > 1": + - skip: + version: ' - 8.12.99' + reason: 'inner_hits on nested kNN search added in 8.13' + + - do: + index: + index: test + id: "4" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "5" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + + - do: + index: + index: test + id: "6" + body: + name: moose.jpg + nested: + - paragraph_id: 0 + vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] + - paragraph_id: 2 + vector: [ 0, 100.0, 0, 14.8, -156.0 ] + - paragraph_id: 3 + vector: [ 0, 1.0, 0, 1.8, -15.0 ] + - do: + indices.refresh: { } + + - do: + search: + index: test + size: 3 + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 5 + inner_hits: { size: 2, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 5} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + + + - do: + search: + 
index: test + size: 5 + body: + fields: [ "name" ] + query: + nested: + path: nested + query: + knn: + field: nested.vector + query_vector: [ -0.5, 90.0, -10, 14.8, -156.0 ] + num_candidates: 5 + inner_hits: { size: 2, "fields": [ "nested.paragraph_id" ], _source: false } + + - match: {hits.total.value: 5} + # All these initial matches are "moose.jpg", which has 3 nested vectors, but two are closest + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + - length: { hits.hits.0.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.0.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + - length: { hits.hits.1.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.1.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.1.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.2.fields.name.0: "moose.jpg"} + - length: { hits.hits.2.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.2.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.2.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + - match: {hits.hits.3.fields.name.0: "moose.jpg"} + - length: { hits.hits.3.inner_hits.nested.hits.hits: 2 } + - match: { hits.hits.3.inner_hits.nested.hits.hits.0.fields.nested.0.paragraph_id.0: "0" } + - match: { hits.hits.3.inner_hits.nested.hits.hits.1.fields.nested.0.paragraph_id.0: "2" } + # Rabbit only has one passage vector + - match: {hits.hits.4.fields.name.0: "rabbit.jpg"} + - length: { hits.hits.4.inner_hits.nested.hits.hits: 1 } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java index 
b20f658a01510..cb3eee3c60c23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; @@ -166,7 +165,7 @@ public void testBanOnlyNodesWithOutstandingDescendantTasks() throws Exception { ActionFuture rootTaskFuture = client().execute(TransportTestAction.ACTION, rootRequest); Set pendingRequests = allowPartialRequest(rootRequest); TaskId rootTaskId = getRootTaskId(rootRequest); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(rootTaskId) .waitForCompletion(true) .execute(); @@ -215,10 +214,10 @@ public void testCancelTaskMultipleTimes() throws Exception { ActionFuture mainTaskFuture = client().execute(TransportTestAction.ACTION, rootRequest); TaskId taskId = getRootTaskId(rootRequest); allowPartialRequest(rootRequest); - CancelTasksResponse resp = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).waitForCompletion(false).get(); + ListTasksResponse resp = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).waitForCompletion(false).get(); assertThat(resp.getTaskFailures(), empty()); assertThat(resp.getNodeFailures(), empty()); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(true) .execute(); @@ -226,7 +225,7 
@@ public void testCancelTaskMultipleTimes() throws Exception { allowEntireRequest(rootRequest); assertThat(cancelFuture.actionGet().getTaskFailures(), empty()); waitForRootTask(mainTaskFuture, false); - CancelTasksResponse cancelError = clusterAdmin().prepareCancelTasks() + ListTasksResponse cancelError = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(randomBoolean()) .get(); @@ -245,7 +244,7 @@ public void testDoNotWaitForCompletion() throws Exception { allowPartialRequest(rootRequest); } boolean waitForCompletion = randomBoolean(); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(taskId) .waitForCompletion(waitForCompletion) .execute(); @@ -311,7 +310,7 @@ public void testRemoveBanParentsOnDisconnect() throws Exception { client().execute(TransportTestAction.ACTION, rootRequest); Set pendingRequests = allowPartialRequest(rootRequest); TaskId rootTaskId = getRootTaskId(rootRequest); - ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() + ActionFuture cancelFuture = clusterAdmin().prepareCancelTasks() .setTargetTaskId(rootTaskId) .waitForCompletion(true) .execute(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 21497b2e6fcfb..884f6dbcd677e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import 
org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -503,7 +502,7 @@ public void testTasksCancellation() throws Exception { ); logger.info("--> cancelling the main test task"); - CancelTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setActions(TEST_TASK_ACTION.name()).get(); + ListTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setActions(TEST_TASK_ACTION.name()).get(); assertEquals(1, cancelTasksResponse.getTasks().size()); expectThrows(TaskCancelledException.class, future); @@ -722,7 +721,7 @@ public void testTasksWaitForAllTask() throws Exception { .map(PersistentTasksCustomMetadata.PersistentTask::getExecutorNode) .collect(Collectors.toSet()); // Spin up a request to wait for all tasks in the cluster to make sure it doesn't cause an infinite loop - ListTasksResponse response = clusterAdmin().prepareListTasks().setWaitForCompletion(true).setTimeout(timeValueSeconds(10)).get(); + ListTasksResponse response = clusterAdmin().prepareListTasks().setWaitForCompletion(true).setTimeout(timeValueSeconds(1)).get(); // We expect the nodes that are running always-running-tasks to report FailedNodeException and fail to list their tasks assertThat(response.getNodeFailures().size(), equalTo(nodesRunningTasks.size())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java index 4d37f75894d56..e0805148a47e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheBlocksIT.java @@ 
-8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.cache.clear; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -33,7 +34,7 @@ public void testClearIndicesCacheWithBlocks() { for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE)) { try { enableIndexBlock("test", blockSetting); - ClearIndicesCacheResponse clearIndicesCacheResponse = indicesAdmin().prepareClearCache("test") + BroadcastResponse clearIndicesCacheResponse = indicesAdmin().prepareClearCache("test") .setFieldDataCache(true) .setQueryCache(true) .setFieldDataCache(true) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java index 69d4f7aaef329..4e2fade87196f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/flush/FlushBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.flush; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -44,7 +45,7 @@ public void testFlushWithBlocks() { )) { try { enableIndexBlock("test", blockSetting); - FlushResponse response = indicesAdmin().prepareFlush("test").get(); + BroadcastResponse response = indicesAdmin().prepareFlush("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java index a3474afc96c51..b5d8ef0308b91 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -50,7 +51,7 @@ public void testForceMergeWithBlocks() { for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY_ALLOW_DELETE)) { try { enableIndexBlock("test", blockSetting); - ForceMergeResponse response = indicesAdmin().prepareForceMerge("test").get(); + BaseBroadcastResponse response = indicesAdmin().prepareForceMerge("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { @@ -70,7 +71,7 @@ public void testForceMergeWithBlocks() { // Merging all indices is blocked when the cluster is read-only try { - ForceMergeResponse response = indicesAdmin().prepareForceMerge().get(); + BaseBroadcastResponse response = indicesAdmin().prepareForceMerge().get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java index 229558e9f4242..22bc37b2fb946 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -51,13 +51,13 @@ public void testForceMergeUUIDConsistent() throws IOException { assertThat(getForceMergeUUID(primary), nullValue()); assertThat(getForceMergeUUID(replica), nullValue()); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(index).setMaxNumSegments(1).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(index).setMaxNumSegments(1).get(); assertThat(forceMergeResponse.getFailedShards(), is(0)); assertThat(forceMergeResponse.getSuccessfulShards(), is(2)); // Force flush to force a new commit that contains the force flush UUID - final FlushResponse flushResponse = indicesAdmin().prepareFlush(index).setForce(true).get(); + final BroadcastResponse flushResponse = indicesAdmin().prepareFlush(index).setForce(true).get(); assertThat(flushResponse.getFailedShards(), is(0)); assertThat(flushResponse.getSuccessfulShards(), is(2)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java index 41abfc1219199..2067038e0fdd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/refresh/RefreshBlocksIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.refresh; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; @@ -39,7 +40,7 @@ public void testRefreshWithBlocks() { )) { try { enableIndexBlock("test", blockSetting); - RefreshResponse response = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse response = indicesAdmin().prepareRefresh("test").get(); assertNoFailures(response); assertThat(response.getSuccessfulShards(), equalTo(numShards.totalNumShards)); } finally { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java index 8843e7ff39bc6..895a60133251f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java @@ -12,12 +12,12 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsGroup; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.refresh.TransportUnpromotableShardRefreshAction; import org.elasticsearch.action.search.ClosePointInTimeRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.TransportClosePointInTimeAction; import org.elasticsearch.action.search.TransportOpenPointInTimeAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; @@ -699,7 +699,7 @@ public void testRefreshFailsIfUnpromotableDisconnects() throws Exception { }); } - RefreshResponse response = 
indicesAdmin().prepareRefresh(INDEX_NAME).get(); + BroadcastResponse response = indicesAdmin().prepareRefresh(INDEX_NAME).get(); assertThat( "each unpromotable replica shard should be added to the shard failures", response.getFailedShards(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index d3001f485846e..709f6b866ba28 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -10,17 +10,15 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESIntegTestCase; @@ -63,7 +61,7 @@ public void testIndexActions() throws Exception { assertThat(indexResponse.getIndex(), equalTo(getConcreteIndexName())); assertThat(indexResponse.getId(), equalTo("1")); 
logger.info("Refreshing"); - RefreshResponse refreshResponse = refresh(); + BroadcastResponse refreshResponse = refresh(); assertThat(refreshResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); logger.info("--> index exists?"); @@ -72,7 +70,7 @@ public void testIndexActions() throws Exception { assertThat(indexExists("test1234565"), equalTo(false)); logger.info("Clearing cache"); - ClearIndicesCacheResponse clearIndicesCacheResponse = indicesAdmin().clearCache( + BroadcastResponse clearIndicesCacheResponse = indicesAdmin().clearCache( new ClearIndicesCacheRequest("test").fieldDataCache(true).queryCache(true) ).actionGet(); assertNoFailures(clearIndicesCacheResponse); @@ -80,7 +78,7 @@ public void testIndexActions() throws Exception { logger.info("Force Merging"); waitForRelocation(ClusterHealthStatus.GREEN); - ForceMergeResponse mergeResponse = forceMerge(); + BaseBroadcastResponse mergeResponse = forceMerge(); assertThat(mergeResponse.getSuccessfulShards(), equalTo(numShards.totalNumShards)); GetResponse getResult; @@ -130,7 +128,7 @@ public void testIndexActions() throws Exception { client().index(new IndexRequest("test").id("2").source(source("2", "test2"))).actionGet(); logger.info("Flushing"); - FlushResponse flushResult = indicesAdmin().prepareFlush("test").get(); + BroadcastResponse flushResult = indicesAdmin().prepareFlush("test").get(); assertThat(flushResult.getSuccessfulShards(), equalTo(numShards.totalNumShards)); assertThat(flushResult.getFailedShards(), equalTo(0)); logger.info("Refreshing"); @@ -220,7 +218,7 @@ public void testBulk() throws Exception { assertThat(bulkResponse.getItems()[5].getIndex(), equalTo(getConcreteIndexName())); waitForRelocation(ClusterHealthStatus.GREEN); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); assertNoFailures(refreshResponse); assertThat(refreshResponse.getSuccessfulShards(), 
equalTo(numShards.totalNumShards)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java index d4fe2fcb4d4c1..c9809574002c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -18,6 +17,7 @@ import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; @@ -641,7 +641,7 @@ public void testGetFieldsComplexField() throws Exception { ensureGreen(); logger.info("flushing"); - FlushResponse flushResponse = indicesAdmin().prepareFlush("my-index").setForce(true).get(); + BroadcastResponse flushResponse = indicesAdmin().prepareFlush("my-index").setForce(true).get(); if (flushResponse.getSuccessfulShards() == 0) { StringBuilder sb = new StringBuilder("failed to flush at least one shard. 
total shards [").append( flushResponse.getTotalShards() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java index 3dd9feff9ce25..1c715beb04356 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndexingMemoryControllerIT.java @@ -7,7 +7,7 @@ */ package org.elasticsearch.indices; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public void testDeletesAloneCanTriggerRefresh() throws Exception { prepareIndex("index").setId(Integer.toString(i)).setSource("field", "value").get(); } // Force merge so we know all merges are done before we start deleting: - ForceMergeResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).get(); + BaseBroadcastResponse r = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).get(); assertNoFailures(r); final RefreshStats refreshStats = shard.refreshStats(); for (int i = 0; i < 100; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 0b99e3ba3ffcf..62e6cb59994b2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.indices; import org.elasticsearch.action.admin.indices.alias.Alias; -import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; @@ -132,7 +132,7 @@ public void testQueryRewrite() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -202,7 +202,7 @@ public void testQueryRewriteMissingValues() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -269,7 +269,7 @@ public void testQueryRewriteDates() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); 
refresh(); ensureSearchable("index"); @@ -343,7 +343,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { assertCacheState(client, "index-3", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = client.admin() + BroadcastResponse forceMergeResponse = client.admin() .indices() .prepareForceMerge("index-1", "index-2", "index-3") .setFlush(true) @@ -424,7 +424,7 @@ public void testCanCache() throws Exception { assertCacheState(client, "index", 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); ensureSearchable("index"); @@ -529,7 +529,7 @@ public void testCacheWithFilteredAlias() { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); client.prepareIndex("index").setId("1").setRouting("1").setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); // Force merge the index to ensure there can be no background merges during the subsequent searches that would invalidate the cache - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); refresh(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java index a328148180107..17b18bf9af1ee 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/flush/FlushIT.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; @@ -54,9 +54,9 @@ public void testWaitIfOngoing() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(10); final CopyOnWriteArrayList errors = new CopyOnWriteArrayList<>(); for (int j = 0; j < 10; j++) { - indicesAdmin().prepareFlush("test").execute(new ActionListener() { + indicesAdmin().prepareFlush("test").execute(new ActionListener<>() { @Override - public void onResponse(FlushResponse flushResponse) { + public void onResponse(BroadcastResponse flushResponse) { try { // don't use assertAllSuccessful it uses a randomized context that belongs to a different thread assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index e5a8246ba6033..70cd143686dc8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -9,8 +9,8 @@ package org.elasticsearch.indices.mapping; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import 
org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; @@ -82,7 +82,7 @@ public void testDynamicUpdates() throws Exception { indexRandom(true, false, indexRequests); logger.info("checking all the documents are there"); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh().get(); assertThat(refreshResponse.getFailedShards(), equalTo(0)); assertHitCount(prepareSearch("test").setSize(0), recCount); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 2935efb4808a7..22f987cc855cc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -15,9 +15,9 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; @@ -134,7 +134,7 @@ public void testBreakerWithRandomExceptions() throws 
IOException, InterruptedExc } logger.info("Start Refresh"); // don't assert on failures here - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed: [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index a98297e8b49ae..e70c48ce8184e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags.Flag; @@ -31,6 +30,7 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -1138,7 +1138,7 @@ public void testFilterCacheStats() throws Exception { }); flush("index"); 
logger.info("--> force merging to a single segment"); - ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge("index").setFlush(true).setMaxNumSegments(1).get(); assertAllSuccessful(forceMergeResponse); logger.info("--> refreshing"); refresh(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index d47c68690bab8..782aafece4399 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -8,11 +8,11 @@ package org.elasticsearch.recovery; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -405,7 +405,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat private void refreshAndAssert() throws Exception { assertBusy(() -> { - RefreshResponse actionGet = indicesAdmin().prepareRefresh().get(); + BroadcastResponse actionGet = indicesAdmin().prepareRefresh().get(); assertAllSuccessful(actionGet); }, 5, TimeUnit.MINUTES); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java index 
bd69aebcd415e..baa721cbbabd2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/SimpleRecoveryIT.java @@ -9,12 +9,11 @@ package org.elasticsearch.recovery; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.XContentType; @@ -43,12 +42,12 @@ public void testSimpleRecovery() throws Exception { NumShards numShards = getNumShards("test"); client().index(new IndexRequest("test").id("1").source(source("1", "test"), XContentType.JSON)).actionGet(); - FlushResponse flushResponse = indicesAdmin().flush(new FlushRequest("test")).actionGet(); + BroadcastResponse flushResponse = indicesAdmin().flush(new FlushRequest("test")).actionGet(); assertThat(flushResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(flushResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(flushResponse.getFailedShards(), equalTo(0)); client().index(new IndexRequest("test").id("2").source(source("2", "test"), XContentType.JSON)).actionGet(); - RefreshResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); + BroadcastResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); assertThat(refreshResponse.getTotalShards(), equalTo(numShards.totalNumShards)); assertThat(refreshResponse.getSuccessfulShards(), 
equalTo(numShards.numPrimaries)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 97a400709cde7..68d00321848eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -8,7 +8,7 @@ package org.elasticsearch.search.basic; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.index.query.QueryBuilders; @@ -55,7 +55,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) createIndex("test"); } prepareIndex("test").setId(id).setSource("field", "test").get(); - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").get(); // at least one shard should be successful when refreshing assertThat(refreshResponse.getSuccessfulShards(), greaterThanOrEqualTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 6ebfc61830269..6985ebb17386c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -14,8 +14,8 @@ import org.apache.lucene.tests.util.English; import org.elasticsearch.ElasticsearchException; import 
org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -111,7 +111,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe } logger.info("Start Refresh"); // don't assert on failures here - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index 33ef75b317e33..07d976437c24c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -13,8 +13,8 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -135,7 +135,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc ESIntegTestCase.NumShards numShards = getNumShards("test"); logger.info("Start Refresh"); // don't assert on failures here - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("test").execute().get(); final boolean refreshFailed = refreshResponse.getShardFailures().length != 0 || refreshResponse.getFailedShards() != 0; logger.info( "Refresh failed [{}] numShardsFailed: [{}], shardFailuresLength: [{}], successfulShards: [{}], totalShards: [{}] ", diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index c4b0346170949..303030a523662 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -12,10 +12,10 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; @@ -50,7 +50,7 @@ public void testFailedSearchWithWrongQuery() throws 
Exception { for (int i = 0; i < 100; i++) { index(client(), Integer.toString(i), "test", i); } - RefreshResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); + BroadcastResponse refreshResponse = indicesAdmin().refresh(new RefreshRequest("test")).actionGet(); assertThat(refreshResponse.getTotalShards(), equalTo(test.totalNumShards)); assertThat(refreshResponse.getSuccessfulShards(), equalTo(test.numPrimaries)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java index cf8d81f406f91..eedda05dcb102 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -268,7 +268,7 @@ public void testCancel() throws Exception { final CancelTasksRequest cancelRequest = new CancelTasksRequest().setTargetTaskId(rootTask.taskId()); cancelRequest.setWaitForCompletion(randomBoolean()); - final ActionFuture cancelFuture = client().admin().cluster().cancelTasks(cancelRequest); + final ActionFuture cancelFuture = client().admin().cluster().cancelTasks(cancelRequest); assertBusy(() -> { final Iterable transportServices = cluster("cluster_a").getInstances(TransportService.class); for (TransportService transportService : 
transportServices) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 8f178397f508b..1fe128da6889c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -112,7 +112,7 @@ public void testConsistentHitsWithSameSeed() throws Exception { CoreMatchers.equalTo(0) ); final int hitCount = response.getHits().getHits().length; - final SearchHit[] currentHits = response.getHits().getHits(); + final SearchHit[] currentHits = response.getHits().asUnpooled().getHits(); ArrayUtil.timSort(currentHits, (o1, o2) -> { // for tie-breaking we have to resort here since if the score is // identical we rely on collection order which might change. diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java index 3dd9e68cf08af..f830ca9ac0cb6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.search.vectors.KnnSearchBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -66,8 +67,9 @@ public void testSimpleNested() throws Exception { refresh(); assertResponse( - prepareSearch("test").setKnnSearch(List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null))) - .setAllowPartialSearchResults(false), + 
prepareSearch("test").setKnnSearch( + List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null).innerHit(new InnerHitBuilder())) + ).setAllowPartialSearchResults(false), response -> assertThat(response.getHits().getHits().length, greaterThan(0)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 81659323e2471..20c5c11f36756 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -12,13 +12,13 @@ import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.FieldMemoryStats; import org.elasticsearch.common.settings.Settings; @@ -1267,7 +1267,7 @@ public void testPrunedSegments() throws IOException { .get(); // we have 2 docs in a segment... 
prepareIndex(INDEX).setId("2").setSource(jsonBuilder().startObject().field("somefield", "somevalue").endObject()).get(); - ForceMergeResponse actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse actionGet = indicesAdmin().prepareForceMerge().setFlush(true).setMaxNumSegments(1).get(); assertAllSuccessful(actionGet); refresh(); // update the first one and then merge.. the target segment will have no value in FIELD diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java index b126e4e51128f..df4d52727384f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/BlobStoreIncrementalityIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.bulk.BulkItemResponse; @@ -20,6 +19,7 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; @@ -159,7 +159,7 @@ public void testForceMergeCausesFullSnapshot() throws Exception { clusterAdmin().prepareCreateSnapshot(repo, 
snapshot1).setIndices(indexName).setWaitForCompletion(true).get(); logger.info("--> force merging down to a single segment"); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).setFlush(true).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).setFlush(true).get(); assertThat(forceMergeResponse.getFailedShards(), is(0)); final String snapshot2 = "snap-2"; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index ed070c3224aa2..c13891728f315 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -21,11 +21,11 @@ import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.RestoreInProgress; @@ -119,7 +119,7 @@ public void testBasicWorkFlow() throws Exception { createIndexWithRandomDocs("test-idx-2", 100); createIndexWithRandomDocs("test-idx-3", 100); - ActionFuture flushResponseFuture = null; + ActionFuture flushResponseFuture 
= null; if (randomBoolean()) { ArrayList indicesToFlush = new ArrayList<>(); for (int i = 1; i < 4; i++) { diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index 0b8cd149744e3..89082389c5805 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -204,8 +204,7 @@ static URL getElasticsearchCodeSourceLocation() { public static Build readBuild(StreamInput in) throws IOException { final String flavor; - if (in.getTransportVersion().before(TransportVersions.V_8_3_0) - || in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().before(TransportVersions.V_8_3_0) || in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { flavor = in.readString(); } else { flavor = "default"; @@ -235,7 +234,7 @@ public static Build readBuild(StreamInput in) throws IOException { version = versionMatcher.group(1); qualifier = versionMatcher.group(2); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { minWireVersion = in.readString(); minIndexVersion = in.readString(); displayString = in.readString(); @@ -252,7 +251,7 @@ public static Build readBuild(StreamInput in) throws IOException { public static void writeBuild(Build build, StreamOutput out) throws IOException { if (out.getTransportVersion().before(TransportVersions.V_8_3_0) - || out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + || out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(build.flavor()); } out.writeString(build.type().displayName()); @@ -266,7 +265,7 @@ public static void writeBuild(Build build, StreamOutput out) throws IOException out.writeBoolean(build.isSnapshot()); out.writeString(build.qualifiedVersion()); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(build.minWireCompatVersion()); out.writeString(build.minIndexCompatVersion()); out.writeString(build.displayString()); diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 50a5f7420847b..237f50befe4bd 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1838,13 +1838,13 @@ private enum ElasticsearchExceptionHandle { org.elasticsearch.http.HttpHeadersValidationException.class, org.elasticsearch.http.HttpHeadersValidationException::new, 169, - TransportVersions.V_8_500_020 + TransportVersions.V_8_9_X ), ROLE_RESTRICTION_EXCEPTION( ElasticsearchRoleRestrictionException.class, ElasticsearchRoleRestrictionException::new, 170, - TransportVersions.V_8_500_020 + TransportVersions.V_8_9_X ), API_NOT_AVAILABLE_EXCEPTION(ApiNotAvailableException.class, ApiNotAvailableException::new, 171, TransportVersions.V_8_500_065), RECOVERY_COMMIT_TOO_NEW_EXCEPTION( diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index c98990e4dfd47..b368cae7e89f1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -92,8 +92,8 @@ static TransportVersion def(int id) { * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW TRANSPORT VERSIONS * Detached transport versions added below here. 
*/ - public static final TransportVersion V_8_500_020 = def(8_500_020); - public static final TransportVersion V_8_500_061 = def(8_500_061); + public static final TransportVersion V_8_9_X = def(8_500_020); + public static final TransportVersion V_8_10_X = def(8_500_061); public static final TransportVersion V_8_500_062 = def(8_500_062); public static final TransportVersion V_8_500_063 = def(8_500_063); public static final TransportVersion V_8_500_064 = def(8_500_064); @@ -170,6 +170,7 @@ static TransportVersion def(int id) { public static final TransportVersion MISSED_INDICES_UPDATE_EXCEPTION_ADDED = def(8_558_00_0); public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); public static final TransportVersion ENRICH_ELASTICSEARCH_VERSION_REMOVED = def(8_560_00_0); + public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH = def(8_560_00_1); public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); @@ -184,7 +185,10 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ = def(8_574_00_0); - public static final TransportVersion RETRIEVERS_ADDED = def(8_575_00_0); + public static final TransportVersion PEERFINDER_REPORTS_PEERS_MASTERS = def(8_575_00_0); + public static final TransportVersion ESQL_MULTI_CLUSTERS_ENRICH = def(8_576_00_0); + public static final TransportVersion NESTED_KNN_MORE_INNER_HITS = def(8_577_00_0); + public static final TransportVersion RETRIEVERS_ADDED = def(8_578_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java index dab46aed5b4bc..2a9449b35c7b5 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteRequest.java @@ -24,9 +24,11 @@ import org.elasticsearch.index.shard.ShardId; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Locale; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.action.index.IndexRequest.MAX_DOCUMENT_ID_LENGTH_IN_BYTES; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -314,4 +316,19 @@ static ActionRequestValidationException validateSeqNoBasedCASParams( return validationException; } + + static ActionRequestValidationException validateDocIdLength(String id, ActionRequestValidationException validationException) { + if (id != null && id.getBytes(StandardCharsets.UTF_8).length > MAX_DOCUMENT_ID_LENGTH_IN_BYTES) { + validationException = addValidationError( + "id [" + + id + + "] is too long, must be no longer than " + + MAX_DOCUMENT_ID_LENGTH_IN_BYTES + + " bytes but was: " + + id.getBytes(StandardCharsets.UTF_8).length, + validationException + ); + } + return validationException; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index b8d1a431f92e8..cdb9191bd8d70 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -117,7 +117,7 @@ public NodeStats(StreamInput in) throws IOException { ingestStats = 
in.readOptionalWriteable(IngestStats::read); adaptiveSelectionStats = in.readOptionalWriteable(AdaptiveSelectionStats::new); indexingPressureStats = in.readOptionalWriteable(IndexingPressureStats::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { repositoriesStats = in.readOptionalWriteable(RepositoriesStats::new); } else { repositoriesStats = null; @@ -294,7 +294,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(ingestStats); out.writeOptionalWriteable(adaptiveSelectionStats); out.writeOptionalWriteable(indexingPressureStats); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(repositoriesStats); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java index 86d0206d62b65..3cba83305c0fa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java @@ -9,16 +9,17 @@ package org.elasticsearch.action.admin.cluster.node.tasks.cancel; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; /** * ActionType for cancelling running tasks */ -public class CancelTasksAction extends ActionType { +public class CancelTasksAction extends ActionType { public static final CancelTasksAction INSTANCE = new CancelTasksAction(); public static final String NAME = "cluster:admin/tasks/cancel"; private CancelTasksAction() { - super(NAME, CancelTasksResponse::new); + super(NAME, ListTasksResponse::new); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java index 45fc4e352a4ba..5fdd50e0c9e66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequestBuilder.java @@ -8,13 +8,14 @@ package org.elasticsearch.action.admin.cluster.node.tasks.cancel; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.tasks.TasksRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; /** * Builder for the request to cancel tasks running on the specified nodes */ -public class CancelTasksRequestBuilder extends TasksRequestBuilder { +public class CancelTasksRequestBuilder extends TasksRequestBuilder { public CancelTasksRequestBuilder(ElasticsearchClient client) { super(client, CancelTasksAction.INSTANCE, new CancelTasksRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java deleted file mode 100644 index a53ed8dacc36c..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksResponse.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.cluster.node.tasks.cancel; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -/** - * Returns the list of tasks that were cancelled - */ -public class CancelTasksResponse extends ListTasksResponse { - - private static final ConstructingObjectParser PARSER = setupParser( - "cancel_tasks_response", - CancelTasksResponse::new - ); - - public CancelTasksResponse(StreamInput in) throws IOException { - super(in); - } - - public CancelTasksResponse( - List tasks, - List taskFailures, - List nodeFailures - ) { - super(tasks, taskFailures, nodeFailures); - } - - public static CancelTasksResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index aa7c19cf35514..1f3271be79797 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.service.ClusterService; @@ -31,7 +32,7 @@ * For a task to be cancellable it has to return an instance of * {@link CancellableTask} from {@link TransportRequest#createTask} */ -public class TransportCancelTasksAction extends TransportTasksAction { +public class TransportCancelTasksAction extends TransportTasksAction { @Inject public TransportCancelTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { @@ -41,7 +42,7 @@ public TransportCancelTasksAction(ClusterService clusterService, TransportServic transportService, actionFilters, CancelTasksRequest::new, - CancelTasksResponse::new, + ListTasksResponse::new, TaskInfo::from, // Cancellation is usually lightweight, and runs on the transport thread if the task didn't even start yet, but some // implementations of CancellableTask#onCancelled() are nontrivial so we use GENERIC here. TODO could it be SAME? 
@@ -50,13 +51,13 @@ public TransportCancelTasksAction(ClusterService clusterService, TransportServic } @Override - protected CancelTasksResponse newResponse( + protected ListTasksResponse newResponse( CancelTasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions ) { - return new CancelTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); + return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions); } protected List processTasks(CancelTasksRequest request) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java index 81a26999d2907..9105c20044223 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java @@ -42,7 +42,7 @@ */ public final class AnalysisStats implements ToXContentFragment, Writeable { - private static final TransportVersion SYNONYM_SETS_VERSION = TransportVersions.V_8_500_061; + private static final TransportVersion SYNONYM_SETS_VERSION = TransportVersions.V_8_10_X; private static final Set SYNONYM_FILTER_TYPES = Set.of("synonym", "synonym_graph"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java index e2894f072011c..be33fada9c934 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java @@ -25,7 +25,7 @@ public class ReloadAnalyzersRequest extends BroadcastRequest { +public class ClearIndicesCacheAction extends ActionType { public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); public static 
final String NAME = "indices:admin/cache/clear"; private ClearIndicesCacheAction() { - super(NAME, ClearIndicesCacheResponse::new); + super(NAME, BroadcastResponse::new); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java index 464c22d1119b0..fb6139c0ae4e3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheRequestBuilder.java @@ -9,11 +9,12 @@ package org.elasticsearch.action.admin.indices.cache.clear; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; public class ClearIndicesCacheRequestBuilder extends BroadcastOperationRequestBuilder< ClearIndicesCacheRequest, - ClearIndicesCacheResponse, + BroadcastResponse, ClearIndicesCacheRequestBuilder> { public ClearIndicesCacheRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java deleted file mode 100644 index df0a298c87eeb..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponse.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.cache.clear; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * The response of a clear cache action. - */ -public class ClearIndicesCacheResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "clear_cache", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ClearIndicesCacheResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - - static { - declareBroadcastFields(PARSER); - } - - ClearIndicesCacheResponse(StreamInput in) throws IOException { - super(in); - } - - ClearIndicesCacheResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static ClearIndicesCacheResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index 
86f0093598744..faeaf0bdb575a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -32,7 +33,7 @@ */ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAction< ClearIndicesCacheRequest, - ClearIndicesCacheResponse, + BroadcastResponse, TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; @@ -64,11 +65,11 @@ protected EmptyResult readShardResult(StreamInput in) throws IOException { } @Override - protected ResponseFactory getResponseFactory( + protected ResponseFactory getResponseFactory( ClearIndicesCacheRequest request, ClusterState clusterState ) { - return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new ClearIndicesCacheResponse( + return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index 3429457dd7e0f..f0596d061aeb3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -46,7 +46,7 @@ protected static void declareFields(Constructing private final String index; - 
protected CreateIndexResponse(StreamInput in) throws IOException { + public CreateIndexResponse(StreamInput in) throws IOException { super(in, true); index = in.readString(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java index 27d96e5feddd5..313fb23c45a6d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class FlushAction extends ActionType { +public class FlushAction extends ActionType { public static final FlushAction INSTANCE = new FlushAction(); public static final String NAME = "indices:admin/flush"; private FlushAction() { - super(NAME, FlushResponse::new); + super(NAME, BroadcastResponse::new); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java index 64485ad0d4496..fc326f804ce8a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequest.java @@ -23,7 +23,6 @@ * memory heuristics in order to automatically trigger flush operations as required in order to clear memory. 
* * @see org.elasticsearch.client.internal.IndicesAdminClient#flush(FlushRequest) - * @see FlushResponse */ public class FlushRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java index 4e474732e3bad..f23e247428698 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushRequestBuilder.java @@ -9,9 +9,10 @@ package org.elasticsearch.action.admin.indices.flush; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; -public class FlushRequestBuilder extends BroadcastOperationRequestBuilder { +public class FlushRequestBuilder extends BroadcastOperationRequestBuilder { public FlushRequestBuilder(ElasticsearchClient client) { super(client, FlushAction.INSTANCE, new FlushRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java deleted file mode 100644 index 0a037ebe09f8a..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushResponse.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.flush; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * A response to flush action. - */ -public class FlushResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("flush", true, arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new FlushResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - }); - - static { - declareBroadcastFields(PARSER); - } - - FlushResponse(StreamInput in) throws IOException { - super(in); - } - - FlushResponse(int totalShards, int successfulShards, int failedShards, List shardFailures) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static FlushResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index ade775db9c755..96b4a0191b10c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -28,7 +29,7 @@ */ public class TransportFlushAction extends TransportBroadcastReplicationAction< FlushRequest, - FlushResponse, + BroadcastResponse, ShardFlushRequest, ReplicationResponse> { @@ -59,12 +60,12 @@ protected ShardFlushRequest newShardRequest(FlushRequest request, ShardId shardI } @Override - protected FlushResponse newResponse( + protected BroadcastResponse newResponse( int successfulShards, int failedShards, int totalNumCopies, List shardFailures ) { - return new FlushResponse(totalNumCopies, successfulShards, failedShards, shardFailures); + return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java index 3ab30298a57f5..1270365cded0d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class ForceMergeAction extends ActionType { +public class ForceMergeAction extends ActionType { public static final ForceMergeAction INSTANCE = new ForceMergeAction(); public static final String NAME = "indices:admin/forcemerge"; private ForceMergeAction() { - super(NAME, ForceMergeResponse::new); + super(NAME, BroadcastResponse::new); } } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java index 241f1a0c7fbf6..37075dd896b80 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java @@ -30,7 +30,6 @@ * to execute, and if so, executes it * * @see org.elasticsearch.client.internal.IndicesAdminClient#forceMerge(ForceMergeRequest) - * @see ForceMergeResponse */ public class ForceMergeRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java index 835749751f4a6..d4c15ee799670 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequestBuilder.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.forcemerge; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; /** @@ -20,7 +21,7 @@ */ public class ForceMergeRequestBuilder extends BroadcastOperationRequestBuilder< ForceMergeRequest, - ForceMergeResponse, + BroadcastResponse, ForceMergeRequestBuilder> { public ForceMergeRequestBuilder(ElasticsearchClient client) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java deleted file mode 100644 index 3853a944e8676..0000000000000 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponse.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.forcemerge; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * A response for force merge action. 
- */ -public class ForceMergeResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "force_merge", - true, - arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new ForceMergeResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - } - ); - - static { - declareBroadcastFields(PARSER); - } - - ForceMergeResponse(StreamInput in) throws IOException { - super(in); - } - - public ForceMergeResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static ForceMergeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index a70498695e149..df98e8f12f18e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; @@ -35,7 +36,7 @@ */ public class TransportForceMergeAction extends TransportBroadcastByNodeAction< ForceMergeRequest, - ForceMergeResponse, + BroadcastResponse, 
TransportBroadcastByNodeAction.EmptyResult> { private final IndicesService indicesService; @@ -68,8 +69,8 @@ protected EmptyResult readShardResult(StreamInput in) throws IOException { } @Override - protected ResponseFactory getResponseFactory(ForceMergeRequest request, ClusterState clusterState) { - return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new ForceMergeResponse( + protected ResponseFactory getResponseFactory(ForceMergeRequest request, ClusterState clusterState) { + return (totalShards, successfulShards, failedShards, responses, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java index 7d9ca67b9fa9e..f094ff75d9c41 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java @@ -9,13 +9,14 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class RefreshAction extends ActionType { +public class RefreshAction extends ActionType { public static final RefreshAction INSTANCE = new RefreshAction(); public static final String NAME = "indices:admin/refresh"; private RefreshAction() { - super(NAME, RefreshResponse::new); + super(NAME, BroadcastResponse::new); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java index d0f9e99fd08ec..1f703e59980d6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequest.java @@ -19,7 +19,6 @@ * default a refresh is scheduled periodically. * * @see org.elasticsearch.client.internal.IndicesAdminClient#refresh(RefreshRequest) - * @see RefreshResponse */ public class RefreshRequest extends BroadcastRequest { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java index 51d569dac0c30..c503ff6ca6930 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshRequestBuilder.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.refresh; import org.elasticsearch.action.support.broadcast.BroadcastOperationRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ElasticsearchClient; /** @@ -16,7 +17,7 @@ * capabilities depends on the index engine used. For example, the internal one requires refresh to be called, but by * default a refresh is scheduled periodically. */ -public class RefreshRequestBuilder extends BroadcastOperationRequestBuilder { +public class RefreshRequestBuilder extends BroadcastOperationRequestBuilder { public RefreshRequestBuilder(ElasticsearchClient client) { super(client, RefreshAction.INSTANCE, new RefreshRequest()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java deleted file mode 100644 index 5669591a17dc7..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponse.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.refresh; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; - -/** - * The response of a refresh action. - */ -public class RefreshResponse extends BroadcastResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("refresh", true, arg -> { - BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; - return new RefreshResponse( - response.getTotalShards(), - response.getSuccessfulShards(), - response.getFailedShards(), - Arrays.asList(response.getShardFailures()) - ); - }); - - static { - declareBroadcastFields(PARSER); - } - - RefreshResponse(StreamInput in) throws IOException { - super(in); - } - - public RefreshResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } - - public static RefreshResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java index 7537e74e2c780..5d6f60216ae05 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportRefreshAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportBroadcastReplicationAction; @@ -29,7 +30,7 @@ */ public class TransportRefreshAction extends TransportBroadcastReplicationAction< RefreshRequest, - RefreshResponse, + BroadcastResponse, BasicReplicationRequest, ReplicationResponse> { @@ -62,12 +63,12 @@ protected BasicReplicationRequest newShardRequest(RefreshRequest request, ShardI } @Override - protected RefreshResponse newResponse( + protected BroadcastResponse newResponse( int successfulShards, int failedShards, int totalNumCopies, List shardFailures ) { - return new RefreshResponse(totalNumCopies, successfulShards, failedShards, shardFailures); + return new BroadcastResponse(totalNumCopies, successfulShards, failedShards, shardFailures); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java index dc26e0380fe72..aa838e473bd29 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java @@ -9,14 +9,15 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; 
-public class ResizeAction extends ActionType { +public class ResizeAction extends ActionType { public static final ResizeAction INSTANCE = new ResizeAction(); public static final String NAME = "indices:admin/resize"; private ResizeAction() { - super(NAME, ResizeResponse::new); + super(NAME, CreateIndexResponse::new); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index 71270cd61b9ed..c39d2e1114618 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -152,7 +153,7 @@ public String getSourceIndex() { * non-negative integer, up to the number of copies per shard (number of replicas + 1), * to wait for the desired amount of shard copies to become active before returning. * Index creation will only wait up until the timeout value for the number of shard copies - * to be active before returning. Check {@link ResizeResponse#isShardsAcknowledged()} to + * to be active before returning. Check {@link CreateIndexResponse#isShardsAcknowledged()} to * determine if the requisite shard copies were all started before returning or timing out. 
* * @param waitForActiveShards number of active shard copies to wait on diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java index a4972d1a98e7d..a18de15037e49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java @@ -8,13 +8,14 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { +public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { public ResizeRequestBuilder(ElasticsearchClient client) { super(client, ResizeAction.INSTANCE, new ResizeRequest()); } @@ -43,7 +44,7 @@ public ResizeRequestBuilder setSettings(Settings settings) { * non-negative integer, up to the number of copies per shard (number of replicas + 1), * to wait for the desired amount of shard copies to become active before returning. * Index creation will only wait up until the timeout value for the number of shard copies - * to be active before returning. Check {@link ResizeResponse#isShardsAcknowledged()} to + * to be active before returning. Check {@link CreateIndexResponse#isShardsAcknowledged()} to * determine if the requisite shard copies were all started before returning or timing out. 
* * @param waitForActiveShards number of active shard copies to wait on diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java deleted file mode 100644 index 768fc18397519..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponse.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -/** - * A response for a resize index action, either shrink or split index. 
- */ -public final class ResizeResponse extends CreateIndexResponse { - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "resize_index", - true, - args -> new ResizeResponse((boolean) args[0], (boolean) args[1], (String) args[2]) - ); - - static { - declareFields(PARSER); - } - - ResizeResponse(StreamInput in) throws IOException { - super(in); - } - - public ResizeResponse(boolean acknowledged, boolean shardsAcknowledged, String index) { - super(acknowledged, shardsAcknowledged, index); - } - - public static ResizeResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java index 8ce69309cf59d..7df58990b69ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java @@ -9,14 +9,15 @@ package org.elasticsearch.action.admin.indices.shrink; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -public class ShrinkAction extends ActionType { +public class ShrinkAction extends ActionType { public static final ShrinkAction INSTANCE = new ShrinkAction(); public static final String NAME = "indices:admin/shrink"; private ShrinkAction() { - super(NAME, ResizeResponse::new); + super(NAME, CreateIndexResponse::new); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index 5686deb6b804a..fbae64dcb6d45 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; @@ -41,7 +42,7 @@ /** * Main class to initiate resizing (shrink / split) an index into a new index */ -public class TransportResizeAction extends TransportMasterNodeAction { +public class TransportResizeAction extends TransportMasterNodeAction { private final MetadataCreateIndexService createIndexService; private final Client client; @@ -86,7 +87,7 @@ protected TransportResizeAction( actionFilters, ResizeRequest::new, indexNameExpressionResolver, - ResizeResponse::new, + CreateIndexResponse::new, EsExecutors.DIRECT_EXECUTOR_SERVICE ); this.createIndexService = createIndexService; @@ -103,7 +104,7 @@ protected void masterOperation( Task task, final ResizeRequest resizeRequest, final ClusterState state, - final ActionListener listener + final ActionListener listener ) { // there is no need to fetch docs stats for split but we keep it simple and do it anyway for simplicity of the code @@ -136,7 +137,11 @@ protected void masterOperation( createIndexService.createIndex( updateRequest, delegatedListener.map( - response -> new ResizeResponse(response.isAcknowledged(), response.isShardsAcknowledged(), updateRequest.index()) + response -> new CreateIndexResponse( + response.isAcknowledged(), + response.isShardsAcknowledged(), + updateRequest.index() + ) ) ); }) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index d0da715b17168..b6345ed0fce4a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -48,7 +48,7 @@ public class CommonStats implements Writeable, ToXContentFragment { private static final TransportVersion VERSION_SUPPORTING_NODE_MAPPINGS = TransportVersions.V_8_5_0; - private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_500_061; + private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_10_X; @Nullable public DocsStats docs; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java deleted file mode 100644 index 1c3f9672f712c..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/FieldUsageShardRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.stats; - -import org.elasticsearch.action.support.broadcast.BroadcastShardRequest; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.tasks.CancellableTask; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Map; - -public class FieldUsageShardRequest extends BroadcastShardRequest { - - private final String[] fields; - - FieldUsageShardRequest(ShardId shardId, FieldUsageStatsRequest request) { - super(shardId, request); - this.fields = request.fields(); - } - - FieldUsageShardRequest(StreamInput in) throws IOException { - super(in); - this.fields = in.readStringArray(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(fields); - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new CancellableTask(id, type, action, "", parentTaskId, headers) { - @Override - public String getDescription() { - return FieldUsageShardRequest.this.getDescription(); - } - }; - } - - @Override - public String getDescription() { - return "get field usage for shard: [" + shardId() + "], fields: " + Arrays.toString(fields); - } - - public String[] fields() { - return fields; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index f90dc894f1b57..477a0bd910719 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -61,7 +61,7 @@ public ShardStats(StreamInput in) throws IOException { isCustomDataPath 
= in.readBoolean(); seqNoStats = in.readOptionalWriteable(SeqNoStats::new); retentionLeaseStats = in.readOptionalWriteable(RetentionLeaseStats::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { isSearchIdle = in.readBoolean(); searchIdleTime = in.readVLong(); } else { @@ -215,7 +215,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(isCustomDataPath); out.writeOptionalWriteable(seqNoStats); out.writeOptionalWriteable(retentionLeaseStats); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(isSearchIdle); out.writeVLong(searchIdleTime); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index ae73904a8447b..1e0a36cfc1a99 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -57,7 +57,7 @@ public Request(String name) { public Request(StreamInput in) throws IOException { super(in); name = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { includeDefaults = in.readBoolean(); } else { includeDefaults = false; @@ -68,7 +68,7 @@ public Request(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(name); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); 
} } @@ -121,7 +121,7 @@ public static class Response extends ActionResponse implements ToXContentObject public Response(StreamInput in) throws IOException { super(in); componentTemplates = in.readMap(ComponentTemplate::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); } else { rolloverConfiguration = null; @@ -149,7 +149,7 @@ public RolloverConfiguration getRolloverConfiguration() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(componentTemplates, StreamOutput::writeWriteable); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index f2c041c2c71bc..8401a510a1482 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -58,7 +58,7 @@ public Request(@Nullable String name) { public Request(StreamInput in) throws IOException { super(in); name = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { includeDefaults = in.readBoolean(); } else { includeDefaults = false; @@ -69,7 +69,7 @@ public Request(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(name); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } @@ -123,7 +123,7 @@ public static class Response extends ActionResponse implements ToXContentObject public Response(StreamInput in) throws IOException { super(in); indexTemplates = in.readMap(ComposableIndexTemplate::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); } else { rolloverConfiguration = null; @@ -147,7 +147,7 @@ public Map indexTemplates() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeMap(indexTemplates, StreamOutput::writeWriteable); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java index 6b71be3925478..9281c6d3dd0bc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateRequest.java @@ -40,7 +40,7 @@ public SimulateIndexTemplateRequest(StreamInput in) throws IOException { super(in); indexName = in.readString(); indexTemplateRequest = in.readOptionalWriteable(TransportPutComposableIndexTemplateAction.Request::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { includeDefaults = in.readBoolean(); } } @@ -50,7 +50,7 @@ public 
void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(indexName); out.writeOptionalWriteable(indexTemplateRequest); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java index b7cc8564be062..106f1a7e4f393 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateIndexTemplateResponse.java @@ -73,7 +73,7 @@ public SimulateIndexTemplateResponse(StreamInput in) throws IOException { } else { this.overlappingTemplates = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { rolloverConfiguration = in.readOptionalWriteable(RolloverConfiguration::new); } } @@ -91,7 +91,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeBoolean(false); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java index 7f637527a6a1f..a1148695ba6d6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/post/SimulateTemplateAction.java 
@@ -63,7 +63,7 @@ public Request(StreamInput in) throws IOException { super(in); templateName = in.readOptionalString(); indexTemplateRequest = in.readOptionalWriteable(TransportPutComposableIndexTemplateAction.Request::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { includeDefaults = in.readBoolean(); } } @@ -73,7 +73,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(templateName); out.writeOptionalWriteable(indexTemplateRequest); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 073ac021f787a..f591cc22d19a8 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -75,7 +75,7 @@ public Request(StreamInput in) throws IOException { super(in); this.names = in.readOptionalStringArray(); this.indicesOptions = IndicesOptions.readIndicesOptions(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.includeDefaults = in.readBoolean(); } else { this.includeDefaults = false; @@ -87,7 +87,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalStringArray(names); indicesOptions.writeIndicesOptions(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(includeDefaults); } } @@ -481,9 +481,7 @@ public Response(List 
dataStreams, @Nullable RolloverConfiguratio public Response(StreamInput in) throws IOException { this( in.readCollectionAsList(DataStreamInfo::new), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) - ? in.readOptionalWriteable(RolloverConfiguration::new) - : null + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(RolloverConfiguration::new) : null ); } @@ -499,7 +497,7 @@ public RolloverConfiguration getRolloverConfiguration() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeCollection(dataStreams); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(rolloverConfiguration); } } diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index 61c979f9494b5..5875ab5089d92 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -62,7 +62,7 @@ public Request(StreamInput in) throws IOException { super(in); sourceIndex = in.readString(); targetIndex = in.readString(); - waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? TimeValue.parseTimeValue(in.readString(), "timeout") : DEFAULT_WAIT_TIMEOUT; downsampleConfig = new DownsampleConfig(in); @@ -89,7 +89,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(sourceIndex); out.writeString(targetIndex); out.writeString( - out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? 
waitTimeout.getStringRep() : DEFAULT_WAIT_TIMEOUT.getStringRep() ); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 12f7c21cba8e1..285346adcd13f 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -43,7 +43,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -73,7 +72,7 @@ public class IndexRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(IndexRequest.class); - private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_500_061; + private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_10_X; /** * Max length of the source document to include into string() @@ -266,17 +265,7 @@ public ActionRequestValidationException validate() { validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException); - if (id != null && id.getBytes(StandardCharsets.UTF_8).length > MAX_DOCUMENT_ID_LENGTH_IN_BYTES) { - validationException = addValidationError( - "id [" - + id - + "] is too long, must be no longer than " - + MAX_DOCUMENT_ID_LENGTH_IN_BYTES - + " bytes but was: " - + id.getBytes(StandardCharsets.UTF_8).length, - validationException - ); - } + validationException = DocWriteRequest.validateDocIdLength(id, validationException); if (pipeline != null && pipeline.isEmpty()) { validationException = addValidationError("pipeline cannot be an empty string", validationException); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java 
b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 591b9a86cda20..1da114adb34f6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -172,7 +172,7 @@ abstract class AbstractSearchAsyncAction exten this.results = resultConsumer; // register the release of the query consumer to free up the circuit breaker memory // at the end of the search - addReleasable(resultConsumer::decRef); + addReleasable(resultConsumer); this.clusters = clusters; } diff --git a/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java b/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java index b4fd0107f731f..96f10d7d8a30e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java +++ b/server/src/main/java/org/elasticsearch/action/search/ArraySearchPhaseResults.java @@ -9,11 +9,11 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.core.AbstractRefCounted; -import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.transport.LeakTracker; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; /** @@ -22,7 +22,13 @@ class ArraySearchPhaseResults extends SearchPhaseResults { final AtomicArray results; - private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(this::doClose)); + private final AtomicBoolean closed = new AtomicBoolean(false); + + private final Releasable releasable = LeakTracker.wrap(() -> { + for (Result result : getAtomicArray().asList()) { + result.decRef(); + } + }); ArraySearchPhaseResults(int size) { super(size); @@ -41,12 +47,16 @@ void consumeResult(Result result, Runnable next) { 
next.run(); } - protected void doClose() { - for (Result result : getAtomicArray().asList()) { - result.decRef(); + @Override + public final void close() { + if (closed.compareAndSet(false, true)) { + releasable.close(); + doClose(); } } + protected void doClose() {} + boolean hasResult(int shardIndex) { return results.get(shardIndex) != null; } @@ -55,24 +65,4 @@ boolean hasResult(int shardIndex) { AtomicArray getAtomicArray() { return results; } - - @Override - public void incRef() { - refCounted.incRef(); - } - - @Override - public boolean tryIncRef() { - return refCounted.tryIncRef(); - } - - @Override - public boolean decRef() { - return refCounted.decRef(); - } - - @Override - public boolean hasReferences() { - return refCounted.hasReferences(); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 9900ee9d824ae..52f41179795d6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -482,24 +482,7 @@ Stream getSuccessfulResults() { } @Override - public void incRef() { - - } - - @Override - public boolean tryIncRef() { - return false; - } - - @Override - public boolean decRef() { - return false; - } - - @Override - public boolean hasReferences() { - return false; - } + public void close() {} } private GroupShardsIterator getIterator( diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index 13972ea2bf64a..2c4cb31584323 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -104,20 +104,5 
@@ AtomicArray getAtomicArray() { } @Override - public void incRef() {} - - @Override - public boolean tryIncRef() { - return true; - } - - @Override - public boolean decRef() { - return true; - } - - @Override - public boolean hasReferences() { - return false; - } + public void close() {} } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java index 3a12b72570caf..0e6830dcfab0e 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountedCollector.java @@ -25,7 +25,6 @@ final class CountedCollector { CountedCollector(SearchPhaseResults resultConsumer, int expectedOps, Runnable onFinish, SearchPhaseContext context) { this.resultConsumer = resultConsumer; - resultConsumer.incRef(); this.counter = new CountDown(expectedOps); this.onFinish = onFinish; this.context = context; @@ -38,11 +37,7 @@ final class CountedCollector { void countDown() { assert counter.isCountedDown() == false : "more operations executed than specified"; if (counter.countDown()) { - try { - onFinish.run(); - } finally { - resultConsumer.decRef(); - } + onFinish.run(); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 54408cd560314..0c9d6ba12a27a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -66,7 +66,7 @@ final class DfsQueryPhase extends SearchPhase { // register the release of the query consumer to free up the circuit breaker memory // at the end of the search - context.addReleasable(queryResult::decRef); + context.addReleasable(queryResult); } @Override @@ -151,7 +151,11 @@ ShardSearchRequest rewriteShardSearchRequest(ShardSearchRequest request) { } 
scoreDocs.sort(Comparator.comparingInt(scoreDoc -> scoreDoc.doc)); String nestedPath = dfsKnnResults.getNestedPath(); - QueryBuilder query = new KnnScoreDocQueryBuilder(scoreDocs.toArray(new ScoreDoc[0])); + QueryBuilder query = new KnnScoreDocQueryBuilder( + scoreDocs.toArray(new ScoreDoc[0]), + source.knnSearch().get(i).getField(), + source.knnSearch().get(i).getQueryVector() + ).boost(source.knnSearch().get(i).boost()); if (nestedPath != null) { query = new NestedQueryBuilder(nestedPath, query, ScoreMode.Max).innerHit(source.knnSearch().get(i).innerHit()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 00e2b41fde3da..7741c1483f69a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -96,6 +96,7 @@ public void run() { hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); } hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + innerHits.mustIncRef(); } } onPhaseDone(); diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 11528f8e1521f..1f06158951392 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -38,11 +38,16 @@ final class FetchSearchPhase extends SearchPhase { private final AggregatedDfs aggregatedDfs; FetchSearchPhase(SearchPhaseResults resultConsumer, AggregatedDfs aggregatedDfs, SearchPhaseContext context) { - this(resultConsumer, aggregatedDfs, context, (response, queryPhaseResults) -> { - response.mustIncRef(); - context.addReleasable(response::decRef); - return new ExpandSearchPhase(context, response.hits, () -> new 
FetchLookupFieldsPhase(context, response, queryPhaseResults)); - }); + this( + resultConsumer, + aggregatedDfs, + context, + (response, queryPhaseResults) -> new ExpandSearchPhase( + context, + response.hits, + () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults) + ) + ); } FetchSearchPhase( @@ -61,7 +66,7 @@ final class FetchSearchPhase extends SearchPhase { ); } this.fetchResults = new ArraySearchPhaseResults<>(resultConsumer.getNumShards()); - context.addReleasable(fetchResults::decRef); + context.addReleasable(fetchResults); this.queryResults = resultConsumer.getAtomicArray(); this.aggregatedDfs = aggregatedDfs; this.nextPhaseFactory = nextPhaseFactory; @@ -225,10 +230,8 @@ private void moveToNextPhase( AtomicArray fetchResultsArr ) { var resp = SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr); - try { - context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); - } finally { - resp.decRef(); - } + context.addReleasable(resp::decRef); + fetchResults.close(); + context.executeNextPhase(this, nextPhaseFactory.apply(resp, queryResults)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java index 39813a883c428..874437311d086 100644 --- a/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/OpenPointInTimeRequest.java @@ -54,7 +54,7 @@ public OpenPointInTimeRequest(StreamInput in) throws IOException { this.keepAlive = in.readTimeValue(); this.routing = in.readOptionalString(); this.preference = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.maxConcurrentShardRequests = in.readVInt(); } if 
(in.getTransportVersion().onOrAfter(TransportVersions.PIT_WITH_INDEX_FILTER)) { @@ -70,7 +70,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(keepAlive); out.writeOptionalString(routing); out.writeOptionalString(preference); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeVInt(maxConcurrentShardRequests); } if (out.getTransportVersion().onOrAfter(TransportVersions.PIT_WITH_INDEX_FILTER)) { diff --git a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java index b7b113601560b..34ee0fc146aa5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java @@ -105,11 +105,7 @@ public QueryPhaseResultConsumer( @Override protected void doClose() { - try { - super.doClose(); - } finally { - pendingMerges.close(); - } + pendingMerges.close(); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 6fcfc97c33c9e..fcc848384866a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -68,7 +68,7 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction clusters ); this.queryPhaseResultConsumer = queryPhaseResultConsumer; - addReleasable(queryPhaseResultConsumer::decRef); + addReleasable(queryPhaseResultConsumer); this.progressListener = task.getProgressListener(); // don't build the SearchShard list (can be expensive) if the SearchProgressListener won't use it if 
(progressListener != SearchProgressListener.NOOP) { @@ -95,7 +95,6 @@ protected SearchPhase getNextPhase(final SearchPhaseResults res final List dfsSearchResults = results.getAtomicArray().asList(); final AggregatedDfs aggregatedDfs = SearchPhaseController.aggregateDfs(dfsSearchResults); final List mergedKnnResults = SearchPhaseController.mergeKnnResults(getRequest(), dfsSearchResults); - queryPhaseResultConsumer.incRef(); return new DfsQueryPhase( dfsSearchResults, aggregatedDfs, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 5ffb9024d3ee1..6cfea93068a86 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -364,11 +364,15 @@ public static SearchResponseSections merge( } ScoreDoc[] sortedDocs = reducedQueryPhase.sortedTopDocs.scoreDocs; var fetchResults = fetchResultsArray.asList(); - SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, fetchResultsArray); - if (reducedQueryPhase.suggest != null && fetchResults.isEmpty() == false) { - mergeSuggest(reducedQueryPhase, fetchResultsArray, hits, sortedDocs); + final SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, fetchResultsArray); + try { + if (reducedQueryPhase.suggest != null && fetchResults.isEmpty() == false) { + mergeSuggest(reducedQueryPhase, fetchResultsArray, hits, sortedDocs); + } + return reducedQueryPhase.buildResponse(hits, fetchResults); + } finally { + hits.decRef(); } - return reducedQueryPhase.buildResponse(hits, fetchResults); } private static void mergeSuggest( @@ -462,6 +466,7 @@ private static SearchHits getHits( searchHit.score(shardDoc.score); } hits.add(searchHit); + searchHit.incRef(); } } return new SearchHits( diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java index 11b8e0a0792a3..28606ecc09f90 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseResults.java @@ -9,7 +9,7 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.util.concurrent.AtomicArray; -import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import java.util.stream.Stream; @@ -17,7 +17,7 @@ /** * This class acts as a basic result collection that can be extended to do on-the-fly reduction or result processing */ -abstract class SearchPhaseResults implements RefCounted { +abstract class SearchPhaseResults implements Releasable { private final int numShards; SearchPhaseResults(int numShards) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 660fdb38b130b..84b9dc745ed92 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -20,7 +20,9 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; @@ -30,6 +32,7 @@ import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.transport.LeakTracker; import 
org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -81,9 +84,16 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO private final Clusters clusters; private final long tookInMillis; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + hits.decRef(); + } + }); + public SearchResponse(StreamInput in) throws IOException { super(in); - this.hits = new SearchHits(in); + this.hits = SearchHits.readFrom(in, true); this.aggregations = in.readBoolean() ? InternalAggregations.readFrom(in) : null; this.suggest = in.readBoolean() ? new Suggest(in) : null; this.timedOut = in.readBoolean(); @@ -191,6 +201,7 @@ public SearchResponse( String pointInTimeId ) { this.hits = hits; + hits.incRef(); this.aggregations = aggregations; this.suggest = suggest; this.profileResults = profileResults; @@ -210,6 +221,26 @@ public SearchResponse( : "SearchResponse can't have both scrollId [" + scrollId + "] and searchContextId [" + pointInTimeId + "]"; } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + public RestStatus status() { return RestStatus.status(successfulShards, totalShards, shardFailures); } @@ -218,6 +249,7 @@ public RestStatus status() { * The search hits. 
*/ public SearchHits getHits() { + assert hasReferences(); return hits; } @@ -344,6 +376,7 @@ public Clusters getClusters() { @Override public Iterator toXContentChunked(ToXContent.Params params) { + assert hasReferences(); return Iterators.concat( ChunkedToXContentHelper.startObject(), this.innerToXContentChunked(params), @@ -493,6 +526,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } } } + return new SearchResponse( hits, aggs, @@ -514,6 +548,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); hits.writeTo(out); out.writeOptionalWriteable((InternalAggregations) aggregations); out.writeOptionalWriteable(suggest); @@ -537,7 +572,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String toString() { - return Strings.toString(this); + return hasReferences() == false ? "SearchResponse[released]" : Strings.toString(this); } /** @@ -632,7 +667,7 @@ public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); int successfulTemp = in.readVInt(); int skippedTemp = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { List clusterList = in.readCollectionAsList(Cluster::new); if (clusterList.isEmpty()) { this.clusterInfo = Collections.emptyMap(); @@ -685,7 +720,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(total); out.writeVInt(successful); out.writeVInt(skipped); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { if (clusterInfo != null) { List clusterList = clusterInfo.values().stream().toList(); out.writeCollection(clusterList); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java 
b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 0586cbb9046dc..9db9d65bc3dac 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -204,33 +204,37 @@ public SearchResponse getMergedResponse(Clusters clusters) { setTopDocsShardIndex(shards, topDocsList); TopDocs topDocs = mergeTopDocs(topDocsList, size, from); SearchHits mergedSearchHits = topDocsToSearchHits(topDocs, topDocsStats); - setSuggestShardIndex(shards, groupedSuggestions); - Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions)); - InternalAggregations reducedAggs = aggs.isEmpty() - ? InternalAggregations.EMPTY - : InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forFinalReduction()); - ShardSearchFailure[] shardFailures = failures.toArray(ShardSearchFailure.EMPTY_ARRAY); - SearchProfileResults profileShardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults); - // make failures ordering consistent between ordinary search and CCS by looking at the shard they come from - Arrays.sort(shardFailures, FAILURES_COMPARATOR); - long tookInMillis = searchTimeProvider.buildTookInMillis(); - return new SearchResponse( - mergedSearchHits, - reducedAggs, - suggest, - topDocsStats.timedOut, - topDocsStats.terminatedEarly, - profileShardResults, - numReducePhases, - null, - totalShards, - successfulShards, - skippedShards, - tookInMillis, - shardFailures, - clusters, - null - ); + try { + setSuggestShardIndex(shards, groupedSuggestions); + Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions)); + InternalAggregations reducedAggs = aggs.isEmpty() + ? 
InternalAggregations.EMPTY + : InternalAggregations.topLevelReduce(aggs, aggReduceContextBuilder.forFinalReduction()); + ShardSearchFailure[] shardFailures = failures.toArray(ShardSearchFailure.EMPTY_ARRAY); + SearchProfileResults profileShardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults); + // make failures ordering consistent between ordinary search and CCS by looking at the shard they come from + Arrays.sort(shardFailures, FAILURES_COMPARATOR); + long tookInMillis = searchTimeProvider.buildTookInMillis(); + return new SearchResponse( + mergedSearchHits, + reducedAggs, + suggest, + topDocsStats.timedOut, + topDocsStats.terminatedEarly, + profileShardResults, + numReducePhases, + null, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardFailures, + clusters, + null + ); + } finally { + mergedSearchHits.decRef(); + } } private static final Comparator FAILURES_COMPARATOR = new Comparator() { @@ -376,6 +380,7 @@ private static SearchHits topDocsToSearchHits(TopDocs topDocs, TopDocsStats topD for (int i = 0; i < topDocs.scoreDocs.length; i++) { FieldDocAndSearchHit scoreDoc = (FieldDocAndSearchHit) topDocs.scoreDocs[i]; searchHits[i] = scoreDoc.searchHit; + scoreDoc.searchHit.mustIncRef(); } } SortField[] sortFields = null; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index 805ef033db27a..d52a585b3e792 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -64,6 +64,7 @@ public SearchResponseSections( int numReducePhases ) { this.hits = hits; + hits.incRef(); this.aggregations = aggregations; this.suggest = suggest; this.profileResults = profileResults; @@ -73,7 +74,7 @@ public SearchResponseSections( refCounted = hits.getHits().length > 0 ? 
LeakTracker.wrap(new AbstractRefCounted() { @Override protected void closeInternal() { - // TODO: noop until hits are ref counted + hits.decRef(); } }) : ALWAYS_REFERENCED; } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 9c80e55a6f49d..e42ac1f4794ff 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -716,7 +716,7 @@ Map createFinalResponse() { final String[] indices = entry.getValue().indices(); final Executor responseExecutor = transportService.getThreadPool().executor(ThreadPool.Names.SEARCH_COORDINATION); // TODO: support point-in-time - if (searchContext == null && connection.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (searchContext == null && connection.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { SearchShardsRequest searchShardsRequest = new SearchShardsRequest( indices, indicesOptions, diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java index 52b4c00175fa8..b69b87190f2a7 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BaseBroadcastResponse.java @@ -42,7 +42,7 @@ public class BaseBroadcastResponse extends ActionResponse { private final DefaultShardOperationFailedException[] shardFailures; @SuppressWarnings("unchecked") - protected static void declareBroadcastFields(ConstructingObjectParser PARSER) { + public static void declareBroadcastFields(ConstructingObjectParser PARSER) { ConstructingObjectParser shardsParser = new ConstructingObjectParser<>( "_shards", true, diff --git 
a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java index bf8376cfc5481..312a9843c9e2b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/unpromotable/BroadcastUnpromotableRequest.java @@ -46,7 +46,7 @@ public BroadcastUnpromotableRequest(StreamInput in) throws IOException { indexShardRoutingTable = null; shardId = new ShardId(in); indices = new String[] { shardId.getIndex().getName() }; - failShardOnError = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) && in.readBoolean(); + failShardOnError = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) && in.readBoolean(); } public BroadcastUnpromotableRequest(IndexShardRoutingTable indexShardRoutingTable) { diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index 600790b2fd841..800eca618c5bc 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -183,6 +183,8 @@ public ActionRequestValidationException validate() { validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException); + validationException = DocWriteRequest.validateDocIdLength(id, validationException); + if (ifSeqNo != UNASSIGNED_SEQ_NO) { if (retryOnConflict > 0) { validationException = addValidationError("compare and write operations can not be retried", validationException); diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index 
a8365a62c9e58..8e9977696bc18 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse; @@ -293,7 +292,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * @param request The nodes tasks request * @return The result future */ - ActionFuture cancelTasks(CancelTasksRequest request); + ActionFuture cancelTasks(CancelTasksRequest request); /** * Cancel active tasks @@ -301,7 +300,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * @param request The nodes tasks request * @param listener A listener to be notified with a result */ - void cancelTasks(CancelTasksRequest request, ActionListener listener); + void cancelTasks(CancelTasksRequest request, ActionListener listener); /** * Cancel active tasks diff --git a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java index 9ba26b95244ab..d931302740f19 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/IndicesAdminClient.java @@ -19,7 +19,6 @@ import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import 
org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; @@ -30,10 +29,8 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; @@ -56,7 +53,6 @@ import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.resolve.ResolveIndexAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; @@ -71,7 +67,6 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import 
org.elasticsearch.action.admin.indices.shrink.ResizeRequestBuilder; -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; @@ -85,6 +80,7 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequestBuilder; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock; import org.elasticsearch.core.Nullable; @@ -261,7 +257,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The refresh request * @return The result future */ - ActionFuture refresh(RefreshRequest request); + ActionFuture refresh(RefreshRequest request); /** * Explicitly refresh one or more indices (making the content indexed since the last refresh searchable). @@ -269,7 +265,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The refresh request * @param listener A listener to be notified with a result */ - void refresh(RefreshRequest request, ActionListener listener); + void refresh(RefreshRequest request, ActionListener listener); /** * Explicitly refresh one or more indices (making the content indexed since the last refresh searchable). @@ -282,7 +278,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The flush request * @return A result future */ - ActionFuture flush(FlushRequest request); + ActionFuture flush(FlushRequest request); /** * Explicitly flush one or more indices (releasing memory from the node). 
@@ -290,7 +286,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The flush request * @param listener A listener to be notified with a result */ - void flush(FlushRequest request, ActionListener listener); + void flush(FlushRequest request, ActionListener listener); /** * Explicitly flush one or more indices (releasing memory from the node). @@ -303,7 +299,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The optimize request * @return A result future */ - ActionFuture forceMerge(ForceMergeRequest request); + ActionFuture forceMerge(ForceMergeRequest request); /** * Explicitly force merge one or more indices into a the number of segments. @@ -311,7 +307,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The force merge request * @param listener A listener to be notified with a result */ - void forceMerge(ForceMergeRequest request, ActionListener listener); + void forceMerge(ForceMergeRequest request, ActionListener listener); /** * Explicitly force merge one or more indices into a the number of segments. @@ -436,7 +432,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The clear indices cache request * @return The result future */ - ActionFuture clearCache(ClearIndicesCacheRequest request); + ActionFuture clearCache(ClearIndicesCacheRequest request); /** * Clear indices cache. @@ -444,7 +440,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { * @param request The clear indices cache request * @param listener A listener to be notified with a result */ - void clearCache(ClearIndicesCacheRequest request, ActionListener listener); + void clearCache(ClearIndicesCacheRequest request, ActionListener listener); /** * Clear indices cache. 
@@ -591,7 +587,7 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Shrinks an index using an explicit request allowing to specify the settings, mappings and aliases of the target index of the index. */ - void resizeIndex(ResizeRequest request, ActionListener listener); + void resizeIndex(ResizeRequest request, ActionListener listener); /** * Swaps the index pointed to by an alias given all provided conditions are satisfied diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 12f3dec804809..c6d9c3a8f3563 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -35,7 +35,6 @@ import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequestBuilder; @@ -129,7 +128,6 @@ import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequestBuilder; import 
org.elasticsearch.action.admin.indices.close.CloseIndexResponse; @@ -144,11 +142,9 @@ import org.elasticsearch.action.admin.indices.flush.FlushAction; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.flush.FlushRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder; @@ -179,7 +175,6 @@ import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.resolve.ResolveIndexAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; @@ -199,7 +194,6 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeAction; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; import org.elasticsearch.action.admin.indices.shrink.ResizeRequestBuilder; -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; @@ -275,6 +269,7 @@ import 
org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; @@ -806,12 +801,12 @@ public GetTaskRequestBuilder prepareGetTask(TaskId taskId) { } @Override - public ActionFuture cancelTasks(CancelTasksRequest request) { + public ActionFuture cancelTasks(CancelTasksRequest request) { return execute(CancelTasksAction.INSTANCE, request); } @Override - public void cancelTasks(CancelTasksRequest request, ActionListener listener) { + public void cancelTasks(CancelTasksRequest request, ActionListener listener) { execute(CancelTasksAction.INSTANCE, request, listener); } @@ -1118,7 +1113,7 @@ public GetAliasesRequestBuilder prepareGetAliases(String... aliases) { } @Override - public ActionFuture clearCache(final ClearIndicesCacheRequest request) { + public ActionFuture clearCache(final ClearIndicesCacheRequest request) { return execute(ClearIndicesCacheAction.INSTANCE, request); } @@ -1138,7 +1133,7 @@ public GetIndexRequestBuilder prepareGetIndex() { } @Override - public void clearCache(final ClearIndicesCacheRequest request, final ActionListener listener) { + public void clearCache(final ClearIndicesCacheRequest request, final ActionListener listener) { execute(ClearIndicesCacheAction.INSTANCE, request, listener); } @@ -1218,12 +1213,12 @@ public OpenIndexRequestBuilder prepareOpen(String... 
indices) { } @Override - public ActionFuture flush(final FlushRequest request) { + public ActionFuture flush(final FlushRequest request) { return execute(FlushAction.INSTANCE, request); } @Override - public void flush(final FlushRequest request, final ActionListener listener) { + public void flush(final FlushRequest request, final ActionListener listener) { execute(FlushAction.INSTANCE, request, listener); } @@ -1278,12 +1273,12 @@ public PutMappingRequestBuilder preparePutMapping(String... indices) { } @Override - public ActionFuture forceMerge(final ForceMergeRequest request) { + public ActionFuture forceMerge(final ForceMergeRequest request) { return execute(ForceMergeAction.INSTANCE, request); } @Override - public void forceMerge(final ForceMergeRequest request, final ActionListener listener) { + public void forceMerge(final ForceMergeRequest request, final ActionListener listener) { execute(ForceMergeAction.INSTANCE, request, listener); } @@ -1293,12 +1288,12 @@ public ForceMergeRequestBuilder prepareForceMerge(String... 
indices) { } @Override - public ActionFuture refresh(final RefreshRequest request) { + public ActionFuture refresh(final RefreshRequest request) { return execute(RefreshAction.INSTANCE, request); } @Override - public void refresh(final RefreshRequest request, final ActionListener listener) { + public void refresh(final RefreshRequest request, final ActionListener listener) { execute(RefreshAction.INSTANCE, request, listener); } @@ -1453,7 +1448,7 @@ public ResizeRequestBuilder prepareResizeIndex(String sourceIndex, String target } @Override - public void resizeIndex(ResizeRequest request, ActionListener listener) { + public void resizeIndex(ResizeRequest request, ActionListener listener) { execute(ResizeAction.INSTANCE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index 5134f153a7fbb..c2cd403836593 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; @@ -140,6 +141,7 @@ public record ClusterFormationState( VotingConfiguration lastCommittedConfiguration, List resolvedAddresses, List foundPeers, + Set mastersOfPeers, long currentTerm, boolean hasDiscoveredQuorum, StatusInfo statusInfo, @@ -151,6 +153,7 @@ public ClusterFormationState( ClusterState clusterState, List resolvedAddresses, List foundPeers, + Set mastersOfPeers, long currentTerm, ElectionStrategy electionStrategy, 
StatusInfo statusInfo, @@ -166,6 +169,7 @@ public ClusterFormationState( clusterState.getLastCommittedConfiguration(), resolvedAddresses, foundPeers, + mastersOfPeers, currentTerm, calculateHasDiscoveredQuorum( foundPeers, @@ -216,6 +220,9 @@ public ClusterFormationState(StreamInput in) throws IOException { new VotingConfiguration(in), in.readCollectionAsImmutableList(TransportAddress::new), in.readCollectionAsImmutableList(DiscoveryNode::new), + in.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS) + ? in.readCollectionAsImmutableSet(DiscoveryNode::new) + : Set.of(), in.readLong(), in.readBoolean(), new StatusInfo(in), @@ -250,12 +257,19 @@ private String getCoordinatorDescription() { acceptedTerm ); - final StringBuilder foundPeersDescription = new StringBuilder(); + final StringBuilder foundPeersDescription = new StringBuilder("["); DiscoveryNodes.addCommaSeparatedNodesWithoutAttributes(foundPeers.iterator(), foundPeersDescription); + if (mastersOfPeers.isEmpty()) { + foundPeersDescription.append(']'); + } else { + foundPeersDescription.append("] who claim current master to be ["); + DiscoveryNodes.addCommaSeparatedNodesWithoutAttributes(mastersOfPeers.iterator(), foundPeersDescription); + foundPeersDescription.append(']'); + } final String discoveryStateIgnoringQuorum = String.format( Locale.ROOT, - "have discovered [%s]; %s", + "have discovered %s; %s", foundPeersDescription, discoveryWillContinueDescription ); @@ -291,7 +305,7 @@ private String getCoordinatorDescription() { if (lastCommittedConfiguration.equals(VotingConfiguration.MUST_JOIN_ELECTED_MASTER)) { return String.format( Locale.ROOT, - "master not discovered yet and this node was detached from its previous cluster, have discovered [%s]; %s", + "master not discovered yet and this node was detached from its previous cluster, have discovered %s; %s", foundPeersDescription, discoveryWillContinueDescription ); @@ -310,7 +324,7 @@ private String getCoordinatorDescription() 
{ return String.format( Locale.ROOT, - "master not discovered or elected yet, an election requires %s, %s [%s]; %s", + "master not discovered or elected yet, an election requires %s, %s %s; %s", quorumDescription, haveDiscoveredQuorum, foundPeersDescription, @@ -388,6 +402,9 @@ public void writeTo(StreamOutput out) throws IOException { lastCommittedConfiguration.writeTo(out); out.writeCollection(resolvedAddresses); out.writeCollection(foundPeers); + if (out.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)) { + out.writeCollection(mastersOfPeers); + } out.writeLong(currentTerm); out.writeBoolean(hasDiscoveredQuorum); statusInfo.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 3da890b37ade8..927ca1152a658 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -334,6 +334,7 @@ public ClusterFormationState getClusterFormationState() { getLastAcceptedState(), // doesn't care about blocks or the current master node so no need for getStateForMasterService peerFinder.getLastResolvedAddresses(), Stream.concat(Stream.of(getLocalNode()), StreamSupport.stream(peerFinder.getFoundPeers().spliterator(), false)).toList(), + peerFinder.getMastersOfPeers(), getCurrentTerm(), electionStrategy, nodeHealthService.getHealth(), diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java index 56289ab348a3a..80b4b455912e7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorService.java @@ -10,6 
+10,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.core.Nullable; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.HealthIndicatorDetails; @@ -39,14 +40,36 @@ public class StableMasterHealthIndicatorService implements HealthIndicatorService { public static final String NAME = "master_is_stable"; - public static final String GET_HELP_GUIDE = "https://ela.st/getting-help"; + + public static final Diagnosis TROUBLESHOOT_DISCOVERY = new Diagnosis( + new Diagnosis.Definition( + NAME, + "troubleshoot_discovery", + "The Elasticsearch cluster does not have a stable master node.", + "See discovery troubleshooting guidance at " + ReferenceDocs.DISCOVERY_TROUBLESHOOTING, + ReferenceDocs.DISCOVERY_TROUBLESHOOTING.toString() + ), + null + ); + + public static final Diagnosis TROUBLESHOOT_UNSTABLE_CLUSTER = new Diagnosis( + new Diagnosis.Definition( + NAME, + "troubleshoot_unstable_cluster", + "The Elasticsearch cluster does not have a stable master node.", + "See unstable cluster troubleshooting guidance at " + ReferenceDocs.UNSTABLE_CLUSTER_TROUBLESHOOTING, + ReferenceDocs.UNSTABLE_CLUSTER_TROUBLESHOOTING.toString() + ), + null + ); + public static final Diagnosis CONTACT_SUPPORT = new Diagnosis( new Diagnosis.Definition( NAME, "contact_support", "The Elasticsearch cluster does not have a stable master node.", - "Get help at " + GET_HELP_GUIDE, - GET_HELP_GUIDE + "Get help at " + ReferenceDocs.CONTACT_SUPPORT, + ReferenceDocs.CONTACT_SUPPORT.toString() ), null ); @@ -67,12 +90,13 @@ public class StableMasterHealthIndicatorService implements HealthIndicatorServic public static final String BACKUP_DISABLED_IMPACT_ID = "backup_disabled"; // Impacts of having an unstable master: - private static final String UNSTABLE_MASTER_INGEST_IMPACT = "The cluster cannot create, delete, or rebalance indices, and cannot " - + 
"insert or update documents."; - private static final String UNSTABLE_MASTER_DEPLOYMENT_MANAGEMENT_IMPACT = "Scheduled tasks such as Watcher, Index Lifecycle " - + "Management, and Snapshot Lifecycle Management will not work. The _cat APIs will not work."; - private static final String UNSTABLE_MASTER_BACKUP_IMPACT = "Snapshot and restore will not work, your data will not be backed up. " - + "Searchable snapshots cannot be mounted."; + private static final String UNSTABLE_MASTER_INGEST_IMPACT = """ + The cluster cannot create, delete, or rebalance indices, and cannot insert or update documents."""; + private static final String UNSTABLE_MASTER_DEPLOYMENT_MANAGEMENT_IMPACT = """ + Scheduled tasks such as Watcher, Index Lifecycle Management, and Snapshot Lifecycle Management will not work. \ + The _cat APIs will not work."""; + private static final String UNSTABLE_MASTER_BACKUP_IMPACT = """ + Snapshot and restore will not work. Your data will not be backed up, and searchable snapshots cannot be mounted."""; /** * This is the list of the impacts to be reported when the master node is determined to be unstable. @@ -128,7 +152,7 @@ HealthIndicatorResult getHealthIndicatorResult( HealthStatus status = HealthStatus.fromCoordinationDiagnosticsStatus(coordinationDiagnosticsResult.status()); HealthIndicatorDetails details = getDetails(coordinationDiagnosticsResult.details(), explain); Collection impacts = status.indicatesHealthProblem() ? UNSTABLE_MASTER_IMPACTS : List.of(); - List diagnosis = status.indicatesHealthProblem() ? getContactSupportUserActions(explain) : List.of(); + List diagnosis = status.indicatesHealthProblem() ? getUnstableMasterDiagnoses(explain) : List.of(); return createIndicator(status, coordinationDiagnosticsResult.summary(), details, impacts, diagnosis); } @@ -215,13 +239,16 @@ private String getNameForNodeId(String nodeId) { } /** - * This method returns the only user action that is relevant when the master is unstable -- contact support. 
- * @param explain If true, the returned list includes a UserAction to contact support, otherwise an empty list - * @return a single UserAction instructing users to contact support. + * This method returns the relevant user actions when the master is unstable, linking to some troubleshooting docs and suggesting to + * contact support. + * + * @param explain If true, the returned list includes UserActions linking to troubleshooting docs and another to contact support, + * otherwise an empty list. + * @return the relevant user actions when the master is unstable. */ - private static List getContactSupportUserActions(boolean explain) { + private List getUnstableMasterDiagnoses(boolean explain) { if (explain) { - return List.of(CONTACT_SUPPORT); + return List.of(TROUBLESHOOT_DISCOVERY, TROUBLESHOOT_UNSTABLE_CLUSTER, CONTACT_SUPPORT); } else { return List.of(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 84db5887b5926..ff31c6fe950d7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -907,7 +907,7 @@ public DataStream(StreamInput in) throws IOException { in.readBoolean(), in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null, - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? 
readIndices(in) : List.of(), in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED) ? in.readBoolean() : false @@ -944,7 +944,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) { out.writeOptionalEnum(indexMode); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(lifecycle); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java index 83a5d99c8f348..215ed515748ab 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java @@ -48,7 +48,7 @@ public class DataStreamLifecycle implements SimpleDiffable, ToXContentObject { // Versions over the wire - public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_500_061; + public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_10_X; public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; @@ -187,7 +187,7 @@ public int hashCode() { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(dataRetention); } if (out.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { @@ -197,7 +197,7 @@ public void writeTo(StreamOutput out) throws IOException { } public DataStreamLifecycle(StreamInput in) throws IOException { - if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { dataRetention = in.readOptionalWriteable(Retention::read); } else { dataRetention = null; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java index aaf256a49a0a5..3453b3b6d70ff 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/SingleNodeShutdownMetadata.java @@ -35,8 +35,8 @@ public class SingleNodeShutdownMetadata implements SimpleDiffable, ToXContentObject { public static final TransportVersion REPLACE_SHUTDOWN_TYPE_ADDED_VERSION = TransportVersions.V_7_16_0; - public static final TransportVersion SIGTERM_ADDED_VERSION = TransportVersions.V_8_500_020; - public static final TransportVersion GRACE_PERIOD_ADDED_VERSION = TransportVersions.V_8_500_020; + public static final TransportVersion SIGTERM_ADDED_VERSION = TransportVersions.V_8_9_X; + public static final TransportVersion GRACE_PERIOD_ADDED_VERSION = TransportVersions.V_8_9_X; public static final ParseField NODE_ID_FIELD = new ParseField("node_id"); public static final ParseField TYPE_FIELD = new ParseField("type"); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java index d36b70b49c6ab..18a99f984707f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Template.java @@ -123,7 +123,7 @@ public Template(StreamInput in) throws IOException { } if (in.getTransportVersion().onOrAfter(DataStreamLifecycle.ADDED_ENABLED_FLAG_VERSION)) { this.lifecycle = in.readOptionalWriteable(DataStreamLifecycle::new); - } else if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + } else if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { boolean isExplicitNull = in.readBoolean(); if (isExplicitNull) { this.lifecycle = DataStreamLifecycle.newBuilder().enabled(false).build(); @@ -177,7 +177,7 @@ public void writeTo(StreamOutput out) throws IOException { } if (out.getTransportVersion().onOrAfter(DataStreamLifecycle.ADDED_ENABLED_FLAG_VERSION)) { out.writeOptionalWriteable(lifecycle); - } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + } else if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { boolean isExplicitNull = lifecycle != null && lifecycle.isEnabled() == false; out.writeBoolean(isExplicitNull); if (isExplicitNull == false) { diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index e77a7b27e1a2c..01b67068db31f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -337,7 +337,7 @@ public DiscoveryNode(StreamInput in) throws IOException { } } this.roles = Collections.unmodifiableSortedSet(roles); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { versionInfo = new VersionInformation(Version.readVersion(in), IndexVersion.readVersion(in), IndexVersion.readVersion(in)); } else { versionInfo = inferVersionInformation(Version.readVersion(in)); @@ -374,7 +374,7 @@ public void writeTo(StreamOutput out) throws IOException { o.writeString(role.roleNameAbbreviation()); o.writeBoolean(role.canContainData()); }); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { Version.writeVersion(versionInfo.nodeVersion(), 
out); IndexVersion.writeVersion(versionInfo.minIndexVersion(), out); IndexVersion.writeVersion(versionInfo.maxIndexVersion(), out); diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index cd2c927d87f69..918056fea9ec6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -667,7 +667,7 @@ public String shortSummary() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(masterNodeId); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeVLong(nodeLeftGeneration); } // else nodeLeftGeneration is zero, or we're sending this to a remote cluster which does not care about the nodeLeftGeneration out.writeCollection(nodes.values()); @@ -682,7 +682,7 @@ public static DiscoveryNodes readFrom(StreamInput in, DiscoveryNode localNode) t builder.localNodeId(localNode.getId()); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { builder.nodeLeftGeneration(in.readVLong()); } // else nodeLeftGeneration is zero, or we're receiving this from a remote cluster so the nodeLeftGeneration does not matter to us diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index 80c969cc1b084..67a9e23f2297f 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -69,6 +69,7 @@ public enum ReferenceDocs { BOOTSTRAP_CHECK_TLS, BOOTSTRAP_CHECK_TOKEN_SSL, BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP, + CONTACT_SUPPORT, // this comment keeps the ';' on the next line so 
every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java index 905373f9400f6..8973ae6e9dd3a 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java @@ -32,7 +32,6 @@ public final class ReleasableBytesReference implements RefCounted, Releasable, B private final RefCounted refCounted; public static ReleasableBytesReference empty() { - EMPTY.incRef(); return EMPTY; } @@ -147,6 +146,9 @@ public StreamInput streamInput() throws IOException { assert hasReferences(); return new BytesReferenceStreamInput(this) { private ReleasableBytesReference retainAndSkip(int len) throws IOException { + if (len == 0) { + return ReleasableBytesReference.empty(); + } // instead of reading the bytes from a stream we just create a slice of the underlying bytes final ReleasableBytesReference result = retainedSlice(offset(), len); // move the stream manually since creating the slice didn't move it diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index d7774d5c0a7ea..e6865e5c66e74 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -329,6 +329,29 @@ public boolean offer(E e) { } } + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public void put(E e) { + // As the queue is unbounded, this method will always add to the queue. 
+ super.offer(e); + } + + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public boolean add(E e) { + // As the queue is unbounded, this method will never return false. + return super.offer(e); + } + + // Overridden to workaround a JDK bug introduced in JDK 21.0.2 + // https://bugs.openjdk.org/browse/JDK-8323659 + @Override + public boolean offer(E e, long timeout, TimeUnit unit) { + // As the queue is unbounded, this method will never return false. + return super.offer(e); + } } /** diff --git a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java index ec315f5200978..5289ac57e10ca 100644 --- a/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java +++ b/server/src/main/java/org/elasticsearch/discovery/PeerFinder.java @@ -40,6 +40,7 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.Executor; +import java.util.stream.Collectors; import static java.util.Collections.emptyList; import static org.elasticsearch.core.Strings.format; @@ -352,10 +353,17 @@ protected void startProbe(TransportAddress transportAddress) { } } + public Set getMastersOfPeers() { + synchronized (mutex) { + return peersByAddress.values().stream().flatMap(p -> p.lastKnownMasterNode.stream()).collect(Collectors.toSet()); + } + } + private class Peer { private final TransportAddress transportAddress; private final SetOnce probeConnectionResult = new SetOnce<>(); private volatile boolean peersRequestInFlight; + private Optional lastKnownMasterNode = Optional.empty(); Peer(TransportAddress transportAddress) { this.transportAddress = transportAddress; @@ -439,9 +447,20 @@ public void onResponse(ProbeConnectionResult connectResult) { @Override public void onFailure(Exception e) { if (verboseFailureLogging) { + + final String believedMasterBy; + synchronized (mutex) { + believedMasterBy = peersByAddress.values() 
+ .stream() + .filter(p -> p.lastKnownMasterNode.map(DiscoveryNode::getAddress).equals(Optional.of(transportAddress))) + .findFirst() + .map(p -> " [current master according to " + p.getDiscoveryNode().descriptionWithoutAttributes() + "]") + .orElse(""); + } + if (logger.isDebugEnabled()) { // log message at level WARN, but since DEBUG logging is enabled we include the full stack trace - logger.warn(() -> format("%s discovery result", Peer.this), e); + logger.warn(() -> format("%s%s discovery result", Peer.this, believedMasterBy), e); } else { final StringBuilder messageBuilder = new StringBuilder(); Throwable cause = e; @@ -452,7 +471,7 @@ public void onFailure(Exception e) { final String message = messageBuilder.length() < 1024 ? messageBuilder.toString() : (messageBuilder.substring(0, 1023) + "..."); - logger.warn("{} discovery result{}", Peer.this, message); + logger.warn("{}{} discovery result{}", Peer.this, believedMasterBy, message); } } else { logger.debug(() -> format("%s discovery result", Peer.this), e); @@ -504,6 +523,7 @@ public void handleResponse(PeersResponse response) { return; } + lastKnownMasterNode = response.getMasterNode(); response.getMasterNode().ifPresent(node -> startProbe(node.getAddress())); for (DiscoveryNode node : response.getKnownPeers()) { startProbe(node.getAddress()); @@ -545,7 +565,13 @@ Releasable getConnectionReference() { @Override public String toString() { - return "address [" + transportAddress + "], node [" + getDiscoveryNode() + "], requesting [" + peersRequestInFlight + "]"; + return "address [" + + transportAddress + + "], node [" + + Optional.ofNullable(probeConnectionResult.get()) + .map(result -> result.getDiscoveryNode().descriptionWithoutAttributes()) + .orElse("unknown") + + (peersRequestInFlight ? 
" [request in flight]" : ""); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index 187d59a88e2fd..d5098e1021a1c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -128,7 +128,7 @@ protected Object parseSourceValue(Object value) { }; } - public ValueFetcher valueFetcher(Set sourcePaths, Object nullValue, String format) { + public ValueFetcher valueFetcher(Set sourcePaths, T nullValue, String format) { Function, List> formatter = getFormatter(format != null ? format : GeometryFormatterFactory.GEOJSON); return new ArraySourceValueFetcher(sourcePaths, nullValueAsSource(nullValue)) { @Override @@ -140,7 +140,15 @@ protected Object parseSourceValue(Object value) { }; } - protected abstract Object nullValueAsSource(Object nullValue); + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + // Currently we can only load from source in ESQL + ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); + // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) + return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); + } + + protected abstract Object nullValueAsSource(T nullValue); } private final Explicit ignoreMalformed; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java index 031b67c263505..be6e00d5c7b45 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/AbstractPointGeometryFieldMapper.java @@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.TriFunction; -import org.elasticsearch.common.geo.GeometryFormatterFactory; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; @@ -174,20 +173,8 @@ protected AbstractPointFieldType( } @Override - protected Object nullValueAsSource(Object nullValue) { - if (nullValue == null) { - return null; - } - SpatialPoint point = (SpatialPoint) nullValue; - return point.toWKT(); - } - - @Override - public BlockLoader blockLoader(BlockLoaderContext blContext) { - // Currently we can only load from source in ESQL - ValueFetcher fetcher = valueFetcher(blContext.sourcePaths(name()), nullValue, GeometryFormatterFactory.WKB); - // TODO consider optimization using BlockSourceReader.lookupFromFieldNames(blContext.fieldNames(), name()) - return new BlockSourceReader.GeometriesBlockLoader(fetcher, BlockSourceReader.lookupMatchingAll()); + protected Object nullValueAsSource(T nullValue) { + return nullValue == null ? 
null : nullValue.toWKT(); } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java index c18c4db955a43..56f1faeb38a5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractShapeGeometryFieldMapper.java @@ -64,14 +64,8 @@ public Orientation orientation() { } @Override - public BlockLoader blockLoader(BlockLoaderContext blContext) { - // TODO: Support shapes in ESQL - return null; - } - - @Override - protected Object nullValueAsSource(Object nullValue) { - // TODO: When we support shapes in ESQL; we need to return a shape in source format here + protected Object nullValueAsSource(T nullValue) { + // we don't support null values for shapes + return nullValue; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 27424d4591ba6..f165361ded105 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -29,6 +29,15 @@ import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.queries.function.FunctionQuery; +import org.apache.lucene.queries.function.valuesource.ByteKnnVectorFieldSource; +import org.apache.lucene.queries.function.valuesource.ByteVectorSimilarityFunction; +import org.apache.lucene.queries.function.valuesource.ConstKnnByteVectorValueSource; +import org.apache.lucene.queries.function.valuesource.ConstKnnFloatValueSource; +import org.apache.lucene.queries.function.valuesource.FloatKnnVectorFieldSource; 
+import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.Query; @@ -1063,6 +1072,67 @@ public Query createKnnQuery( return knnQuery; } + public Query createExactKnnQuery(float[] queryVector) { + if (isIndexed() == false) { + throw new IllegalArgumentException( + "to perform knn search on field [" + name() + "], its mapping must have [index] set to [true]" + ); + } + if (queryVector.length != dims) { + throw new IllegalArgumentException( + "the query vector has a different dimension [" + queryVector.length + "] than the index vectors [" + dims + "]" + ); + } + elementType.checkVectorBounds(queryVector); + if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { + float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); + elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); + if (similarity == VectorSimilarity.COSINE + && ElementType.FLOAT.equals(elementType) + && indexVersionCreated.onOrAfter(NORMALIZE_COSINE) + && isNotUnitVector(squaredMagnitude)) { + float length = (float) Math.sqrt(squaredMagnitude); + queryVector = Arrays.copyOf(queryVector, queryVector.length); + for (int i = 0; i < queryVector.length; i++) { + queryVector[i] /= length; + } + } + } + VectorSimilarityFunction vectorSimilarityFunction = similarity.vectorSimilarityFunction(indexVersionCreated, elementType); + return switch (elementType) { + case BYTE -> { + byte[] bytes = new byte[queryVector.length]; + for (int i = 0; i < queryVector.length; i++) { + bytes[i] = (byte) queryVector[i]; + } + yield new BooleanQuery.Builder().add(new FieldExistsQuery(name()), BooleanClause.Occur.FILTER) + .add( + new FunctionQuery( + new 
ByteVectorSimilarityFunction( + vectorSimilarityFunction, + new ByteKnnVectorFieldSource(name()), + new ConstKnnByteVectorValueSource(bytes) + ) + ), + BooleanClause.Occur.SHOULD + ) + .build(); + } + case FLOAT -> new BooleanQuery.Builder().add(new FieldExistsQuery(name()), BooleanClause.Occur.FILTER) + .add( + new FunctionQuery( + new FloatVectorSimilarityFunction( + vectorSimilarityFunction, + new FloatKnnVectorFieldSource(name()), + new ConstKnnFloatValueSource(queryVector) + ) + ), + BooleanClause.Occur.SHOULD + ) + .build(); + }; + } + public Query createKnnQuery( float[] queryVector, int numCands, @@ -1082,7 +1152,6 @@ public Query createKnnQuery( ); } elementType.checkVectorBounds(queryVector); - if (similarity == VectorSimilarity.DOT_PRODUCT || similarity == VectorSimilarity.COSINE) { float squaredMagnitude = VectorUtil.dotProduct(queryVector, queryVector); elementType.checkVectorMagnitude(similarity, ElementType.errorFloatElementsAppender(queryVector), squaredMagnitude); @@ -1110,6 +1179,7 @@ && isNotUnitVector(squaredMagnitude)) { case FLOAT -> parentFilter != null ? 
new ProfilingDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) : new ProfilingKnnFloatVectorQuery(name(), queryVector, numCands, filter); + }; if (similarityThreshold != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index f86142ffbe862..d3d7b46d3d729 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -296,6 +296,10 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws if (queryRewriteContext == null) { return this; } + final InnerHitsRewriteContext ihrc = queryRewriteContext.convertToInnerHitsRewriteContext(); + if (ihrc != null) { + return doInnerHitsRewrite(ihrc); + } final CoordinatorRewriteContext crc = queryRewriteContext.convertToCoordinatorRewriteContext(); if (crc != null) { return doCoordinatorRewrite(crc); @@ -342,6 +346,16 @@ protected QueryBuilder doIndexMetadataRewrite(final QueryRewriteContext context) return this; } + /** + * Optional rewrite logic that allows for optimization for extracting inner hits + * @param context an {@link InnerHitsRewriteContext} instance + * @return A {@link QueryBuilder} representing the rewritten query optimized for inner hit extraction + * @throws IOException if an error occurs while rewriting the query + */ + protected QueryBuilder doInnerHitsRewrite(final InnerHitsRewriteContext context) throws IOException { + return this; + } + /** * For internal usage only! 
* diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java new file mode 100644 index 0000000000000..0b437fa451e1b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitsRewriteContext.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.index.query; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.xcontent.XContentParserConfiguration; + +import java.util.function.LongSupplier; + +/** + * Context object used to rewrite {@link QueryBuilder} instances into an optimized version for extracting inner_hits. 
+ */ +public final class InnerHitsRewriteContext extends QueryRewriteContext { + public InnerHitsRewriteContext(final XContentParserConfiguration parserConfiguration, final LongSupplier nowInMillis) { + super(parserConfiguration, null, nowInMillis); + } + + @Override + public InnerHitsRewriteContext convertToInnerHitsRewriteContext() { + return this; + } + + @Override + @SuppressWarnings({ "rawtypes" }) + public void executeAsyncActions(ActionListener listener) { + // InnerHitsRewriteContext does not support async actions at all, and doesn't supply a valid `client` object + throw new UnsupportedOperationException("InnerHitsRewriteContext does not support async actions"); + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java index 04ae0bb498841..47e4cf7273703 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java @@ -39,14 +39,14 @@ public MatchNoneQueryBuilder(String rewriteReason) { */ public MatchNoneQueryBuilder(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { rewriteReason = in.readOptionalString(); } } @Override protected void doWriteTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalString(rewriteReason); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index 9a8800c05bdb2..e36c4d608d59f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ 
b/server/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -159,6 +159,10 @@ public DataRewriteContext convertToDataRewriteContext() { return null; } + public InnerHitsRewriteContext convertToInnerHitsRewriteContext() { + return null; + } + /** * Returns the {@link MappedFieldType} for the provided field name. * If the field is not mapped, the behaviour depends on the index.query.parse.allow_unmapped_fields setting, which defaults to true. diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 5a2b01838e27b..63cd598caa784 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -91,7 +91,7 @@ public final class SimpleQueryStringBuilder extends AbstractQueryBuilder metrics; private NodeStatsCache stats; + private final TimeValue cacheExpiry; /** * Constructs a new NodeMetrics instance. * - * @param meterRegistry The MeterRegistry used to register metrics. - * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. - */ - public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { + * @param meterRegistry The MeterRegistry used to register metrics. + * @param nodeService The NodeService for interacting with the Elasticsearch node and extracting statistics. 
+ * @param metricsInterval The interval at which the agent sends metrics to the APM Server + * */ + public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService, TimeValue metricsInterval) { this.registry = meterRegistry; this.nodeService = nodeService; this.metrics = new ArrayList<>(17); + // we set the cache to expire after half the interval at which the agent sends + // metrics to the APM Server so that the cache does not + // update during the same poll period and expires before a new poll period + this.cacheExpiry = new TimeValue(metricsInterval.getMillis() / 2); } /** @@ -57,10 +63,7 @@ public NodeMetrics(MeterRegistry meterRegistry, NodeService nodeService) { * @param registry The MeterRegistry used to register and collect metrics. */ private void registerAsyncMetrics(MeterRegistry registry) { - // Agent should poll stats every 4 minutes and being this cache is lazy we need a - // number high enough so that the cache does not update during the same poll - // period and that expires before a new poll period, therefore we choose 1 minute. 
- this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); + this.stats = new NodeStatsCache(cacheExpiry); metrics.add( registry.registerLongAsyncCounter( "es.indices.get.total", diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index aa62ea689a5a9..d61d09cdac498 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -78,6 +78,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; @@ -966,7 +967,8 @@ record PluginServiceInstances( repositoryService ); - final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService); + final TimeValue metricsInterval = settings.getAsTime("tracing.apm.agent.metrics_interval", TimeValue.timeValueSeconds(10)); + final NodeMetrics nodeMetrics = new NodeMetrics(telemetryProvider.getMeterRegistry(), nodeService, metricsInterval); final SearchService searchService = serviceProvider.newSearchService( pluginsService, diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java index 32188d55e418a..63c97685c913e 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksNodeService.java @@ -10,7 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import 
org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.common.Strings; @@ -300,9 +300,9 @@ private void cancelTask(Long allocationId) { if (task.markAsCancelled()) { // Cancel the local task using the task manager String reason = "task has been removed, cancelling locally"; - persistentTasksService.sendCancelRequest(task.getId(), reason, new ActionListener() { + persistentTasksService.sendCancelRequest(task.getId(), reason, new ActionListener<>() { @Override - public void onResponse(CancelTasksResponse cancelTasksResponse) { + public void onResponse(ListTasksResponse cancelTasksResponse) { logger.trace( "Persistent task [{}] with id [{}] and allocation id [{}] was cancelled", task.getAction(), diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index 869a93110d257..227569341919a 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; @@ -90,7 +90,7 @@ public void sendCompletionRequest( /** * Cancels a locally running task using the Task Manager API */ - void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { + void 
sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { CancelTasksRequest request = new CancelTasksRequest(); request.setTargetTaskId(new TaskId(clusterService.localNode().getId(), taskId)); request.setReason(reason); diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java index 722779a646824..b9cce9e3ec500 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesStats.java @@ -28,7 +28,7 @@ public class RepositoriesStats implements Writeable, ToXContentFragment { private final Map repositoryThrottlingStats; public RepositoriesStats(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { repositoryThrottlingStats = in.readMap(ThrottlingStats::new); } else { repositoryThrottlingStats = new HashMap<>(); @@ -41,7 +41,7 @@ public RepositoriesStats(Map repositoryThrottlingStats) @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeMap(repositoryThrottlingStats, StreamOutput::writeWriteable); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java index 4c9ac8fcb9a3c..815c3ce7e2c33 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java @@ -11,9 +11,9 @@ import org.elasticsearch.action.ActionRequestValidationException; import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.SubscribableListener; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.rest.BaseRestHandler; @@ -65,7 +65,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (validationException != null) { throw validationException; } - final var responseListener = new SubscribableListener(); + final var responseListener = new SubscribableListener(); final var task = client.executeLocally(ForceMergeAction.INSTANCE, mergeRequest, responseListener); responseListener.addListener(new LoggingTaskListener<>(task)); return sendTask(client.getLocalNodeId(), task); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java index cf238d57c4cab..97964b09593f5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSyncedFlushAction.java @@ -9,8 +9,8 @@ package org.elasticsearch.rest.action.admin.indices; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; @@ -55,14 +55,14 @@ public 
RestChannelConsumer prepareRequest(final RestRequest request, final NodeC return channel -> client.admin().indices().flush(flushRequest, new SimulateSyncedFlushResponseListener(channel)); } - static final class SimulateSyncedFlushResponseListener extends RestBuilderListener { + static final class SimulateSyncedFlushResponseListener extends RestBuilderListener { SimulateSyncedFlushResponseListener(RestChannel channel) { super(channel); } @Override - public RestResponse buildResponse(FlushResponse flushResponse, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(BroadcastResponse flushResponse, XContentBuilder builder) throws Exception { builder.startObject(); buildSyncedFlushResponse(builder, flushResponse); builder.endObject(); @@ -70,7 +70,7 @@ public RestResponse buildResponse(FlushResponse flushResponse, XContentBuilder b return new RestResponse(restStatus, builder); } - private static void buildSyncedFlushResponse(XContentBuilder builder, FlushResponse flushResponse) throws IOException { + private static void buildSyncedFlushResponse(XContentBuilder builder, BroadcastResponse flushResponse) throws IOException { builder.startObject("_shards"); builder.field("total", flushResponse.getTotalShards()); builder.field("successful", flushResponse.getSuccessfulShards()); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index d2fc20ab83269..dde044bf15115 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -13,6 +13,7 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; import 
org.elasticsearch.common.document.DocumentField; @@ -24,7 +25,9 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.MapperService; @@ -35,6 +38,7 @@ import org.elasticsearch.search.fetch.subphase.LookupField; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.Source; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ObjectParser; @@ -55,6 +59,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -70,7 +75,7 @@ * * @see SearchHits */ -public final class SearchHit implements Writeable, ToXContentObject { +public final class SearchHit implements Writeable, ToXContentObject, RefCounted { private final transient int docId; @@ -114,6 +119,8 @@ public final class SearchHit implements Writeable, ToXContentObject { private Map innerHits; + private final RefCounted refCounted; + // used only in tests public SearchHit(int docId) { this(docId, null); @@ -124,6 +131,10 @@ public SearchHit(int docId, String id) { } public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity) { + this(nestedTopDocId, id, nestedIdentity, null); + } + + private SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, @Nullable RefCounted refCounted) { this( nestedTopDocId, DEFAULT_SCORE, @@ -142,8 
+153,10 @@ public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity) { null, null, null, + null, new HashMap<>(), - new HashMap<>() + new HashMap<>(), + refCounted ); } @@ -164,9 +177,11 @@ public SearchHit( SearchShardTarget shard, String index, String clusterAlias, + Map sourceAsMap, Map innerHits, Map documentFields, - Map metaFields + Map metaFields, + @Nullable RefCounted refCounted ) { this.docId = docId; this.score = score; @@ -184,12 +199,28 @@ public SearchHit( this.shard = shard; this.index = index; this.clusterAlias = clusterAlias; + this.sourceAsMap = sourceAsMap; this.innerHits = innerHits; this.documentFields = documentFields; this.metaFields = metaFields; + this.refCounted = refCounted == null ? LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + if (SearchHit.this.innerHits != null) { + for (SearchHits h : SearchHit.this.innerHits.values()) { + h.decRef(); + } + SearchHit.this.innerHits = null; + } + if (SearchHit.this.source instanceof RefCounted r) { + r.decRef(); + } + SearchHit.this.source = null; + } + }) : ALWAYS_REFERENCED; } - public static SearchHit readFrom(StreamInput in) throws IOException { + public static SearchHit readFrom(StreamInput in, boolean pooled) throws IOException { final float score = in.readFloat(); final int rank; if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { @@ -205,7 +236,7 @@ public static SearchHit readFrom(StreamInput in) throws IOException { final long version = in.readLong(); final long seqNo = in.readZLong(); final long primaryTerm = in.readVLong(); - BytesReference source = in.readBytesReference(); + BytesReference source = pooled ? 
in.readReleasableBytesReference() : in.readBytesReference(); if (source.length() == 0) { source = null; } @@ -244,7 +275,7 @@ public static SearchHit readFrom(StreamInput in) throws IOException { if (size > 0) { innerHits = Maps.newMapWithExpectedSize(size); for (int i = 0; i < size; i++) { - innerHits.put(in.readString(), new SearchHits(in)); + innerHits.put(in.readString(), SearchHits.readFrom(in, pooled)); } } else { innerHits = null; @@ -266,16 +297,31 @@ public static SearchHit readFrom(StreamInput in) throws IOException { shardTarget, index, clusterAlias, + null, innerHits, documentFields, - metaFields + metaFields, + pooled ? null : ALWAYS_REFERENCED ); } + public static SearchHit unpooled(int docId) { + return unpooled(docId, null); + } + + public static SearchHit unpooled(int docId, String id) { + return unpooled(docId, id, null); + } + + public static SearchHit unpooled(int nestedTopDocId, String id, NestedIdentity nestedIdentity) { + return new SearchHit(nestedTopDocId, id, nestedIdentity, ALWAYS_REFERENCED); + } + private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); out.writeFloat(score); if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeVInt(rank); @@ -401,6 +447,7 @@ public NestedIdentity getNestedIdentity() { * Returns bytes reference, also uncompress the source if needed. */ public BytesReference getSourceRef() { + assert hasReferences(); if (this.source == null) { return null; } @@ -427,6 +474,7 @@ public SearchHit sourceRef(BytesReference source) { * {@code _source} or if source is disabled in the mapping. */ public boolean hasSource() { + assert hasReferences(); return source != null; } @@ -434,6 +482,7 @@ public boolean hasSource() { * The source of the document as string (can be {@code null}). 
*/ public String getSourceAsString() { + assert hasReferences(); if (source == null) { return null; } @@ -448,6 +497,7 @@ public String getSourceAsString() { * The source of the document as a map (can be {@code null}). */ public Map getSourceAsMap() { + assert hasReferences(); if (source == null) { return null; } @@ -463,6 +513,7 @@ public Map getSourceAsMap() { * The hit field matching the given field name. */ public DocumentField field(String fieldName) { + assert hasReferences(); DocumentField result = documentFields.get(fieldName); if (result != null) { return result; @@ -653,13 +704,72 @@ public Map getMatchedQueriesAndScores() { * @return Inner hits or null if there are none */ public Map getInnerHits() { + assert hasReferences(); return innerHits; } public void setInnerHits(Map innerHits) { + assert innerHits == null || innerHits.values().stream().noneMatch(h -> h.hasReferences() == false); + assert this.innerHits == null; this.innerHits = innerHits; } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + + public SearchHit asUnpooled() { + assert hasReferences(); + if (isPooled() == false) { + return this; + } + return new SearchHit( + docId, + score, + rank, + id, + nestedIdentity, + version, + seqNo, + primaryTerm, + source instanceof RefCounted ? new BytesArray(source.toBytesRef(), true) : source, + highlightFields, + sortValues, + matchedQueries, + explanation, + shard, + index, + clusterAlias, + sourceAsMap, + innerHits == null + ? 
null + : innerHits.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().asUnpooled())), + documentFields, + metaFields, + ALWAYS_REFERENCED + ); + } + + public boolean isPooled() { + return refCounted != ALWAYS_REFERENCED; + } + public static class Fields { static final String _INDEX = "_index"; static final String _ID = "_id"; @@ -690,6 +800,7 @@ public static class Fields { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + assert hasReferences(); builder.startObject(); toInnerXContent(builder, params); builder.endObject(); @@ -972,9 +1083,11 @@ public static SearchHit createFromMap(Map values) { shardTarget, index, clusterAlias, + null, get(Fields.INNER_HITS, values, null), get(DOCUMENT_FIELDS, values, Collections.emptyMap()), - get(METADATA_FIELDS, values, Collections.emptyMap()) + get(METADATA_FIELDS, values, Collections.emptyMap()), + ALWAYS_REFERENCED // TODO: do we ever want pooling here? ); } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index c689f928954d2..a5c9425ba754c 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -18,8 +18,11 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -32,7 +35,7 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -public final class SearchHits implements 
Writeable, ChunkedToXContent, Iterable { +public final class SearchHits implements Writeable, ChunkedToXContent, RefCounted, Iterable { public static final SearchHit[] EMPTY = new SearchHit[0]; public static final SearchHits EMPTY_WITH_TOTAL_HITS = SearchHits.empty(new TotalHits(0, Relation.EQUAL_TO), 0); @@ -48,6 +51,8 @@ public final class SearchHits implements Writeable, ChunkedToXContent, Iterable< @Nullable private final Object[] collapseValues; + private final RefCounted refCounted; + public static SearchHits empty(@Nullable TotalHits totalHits, float maxScore) { return new SearchHits(EMPTY, totalHits, maxScore); } @@ -63,6 +68,35 @@ public SearchHits( @Nullable SortField[] sortFields, @Nullable String collapseField, @Nullable Object[] collapseValues + ) { + this( + hits, + totalHits, + maxScore, + sortFields, + collapseField, + collapseValues, + hits.length == 0 ? ALWAYS_REFERENCED : LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + for (int i = 0; i < hits.length; i++) { + assert hits[i] != null; + hits[i].decRef(); + hits[i] = null; + } + } + }) + ); + } + + private SearchHits( + SearchHit[] hits, + @Nullable TotalHits totalHits, + float maxScore, + @Nullable SortField[] sortFields, + @Nullable String collapseField, + @Nullable Object[] collapseValues, + RefCounted refCounted ) { this.hits = hits; this.totalHits = totalHits; @@ -70,32 +104,64 @@ public SearchHits( this.sortFields = sortFields; this.collapseField = collapseField; this.collapseValues = collapseValues; + this.refCounted = refCounted; } - public SearchHits(StreamInput in) throws IOException { + public static SearchHits unpooled(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) { + return unpooled(hits, totalHits, maxScore, null, null, null); + } + + public static SearchHits unpooled( + SearchHit[] hits, + @Nullable TotalHits totalHits, + float maxScore, + @Nullable SortField[] sortFields, + @Nullable String collapseField, + @Nullable 
Object[] collapseValues + ) { + assert assertUnpooled(hits); + return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues, ALWAYS_REFERENCED); + } + + private static boolean assertUnpooled(SearchHit[] searchHits) { + for (SearchHit searchHit : searchHits) { + assert searchHit.isPooled() == false : "hit was pooled [" + searchHit + "]"; + } + return true; + } + + public static SearchHits readFrom(StreamInput in, boolean pooled) throws IOException { + final TotalHits totalHits; if (in.readBoolean()) { totalHits = Lucene.readTotalHits(in); } else { // track_total_hits is false totalHits = null; } - maxScore = in.readFloat(); + final float maxScore = in.readFloat(); int size = in.readVInt(); + final SearchHit[] hits; if (size == 0) { hits = EMPTY; } else { hits = new SearchHit[size]; for (int i = 0; i < hits.length; i++) { - hits[i] = SearchHit.readFrom(in); + hits[i] = SearchHit.readFrom(in, pooled); } } - sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new); - collapseField = in.readOptionalString(); - collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new); + var sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new); + var collapseField = in.readOptionalString(); + var collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new); + if (pooled) { + return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues); + } else { + return unpooled(hits, totalHits, maxScore, sortFields, collapseField, collapseValues); + } } @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); final boolean hasTotalHits = totalHits != null; out.writeBoolean(hasTotalHits); if (hasTotalHits) { @@ -128,6 +194,7 @@ public float getMaxScore() { * The hits of the search request (based on the search type, and from / size provided). 
*/ public SearchHit[] getHits() { + assert hasReferences(); return this.hits; } @@ -135,6 +202,7 @@ public SearchHit[] getHits() { * Return the hit as the provided position. */ public SearchHit getAt(int position) { + assert hasReferences(); return hits[position]; } @@ -165,9 +233,42 @@ public Object[] getCollapseValues() { @Override public Iterator iterator() { + assert hasReferences(); return Iterators.forArray(getHits()); } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + + public SearchHits asUnpooled() { + assert hasReferences(); + if (refCounted == ALWAYS_REFERENCED) { + return this; + } + final SearchHit[] unpooledHits = new SearchHit[hits.length]; + for (int i = 0; i < hits.length; i++) { + unpooledHits[i] = hits[i].asUnpooled(); + } + return unpooled(unpooledHits, totalHits, maxScore, sortFields, collapseField, collapseValues); + } + public static final class Fields { public static final String HITS = "hits"; public static final String TOTAL = "total"; @@ -176,6 +277,7 @@ public static final class Fields { @Override public Iterator toXContentChunked(ToXContent.Params params) { + assert hasReferences(); return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { @@ -239,7 +341,7 @@ public static SearchHits fromXContent(XContentParser parser) throws IOException } } } - return new SearchHits(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); + return SearchHits.unpooled(hits.toArray(SearchHits.EMPTY), totalHits, maxScore); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java 
b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 279c36fae0c32..d8b380868748b 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -251,6 +251,7 @@ import org.elasticsearch.search.suggest.phrase.StupidBackoff; import org.elasticsearch.search.suggest.term.TermSuggestion; import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; +import org.elasticsearch.search.vectors.ExactKnnQueryBuilder; import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; @@ -1153,6 +1154,9 @@ private void registerQueryParsers(List plugins) { registerQuery(new QuerySpec<>(KnnScoreDocQueryBuilder.NAME, KnnScoreDocQueryBuilder::new, parser -> { throw new IllegalArgumentException("[score_doc] queries cannot be provided directly"); })); + registerQuery(new QuerySpec<>(ExactKnnQueryBuilder.NAME, ExactKnnQueryBuilder::new, parser -> { + throw new IllegalArgumentException("[exact_knn] queries cannot be provided directly"); + })); registerFromPlugin(plugins, SearchPlugin::getQueries, this::registerQuery); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 8a03c7e9f08ba..d5b2565187a3f 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.query.CoordinatorRewriteContextProvider; import org.elasticsearch.index.query.InnerHitContextBuilder; +import org.elasticsearch.index.query.InnerHitsRewriteContext; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import 
org.elasticsearch.index.query.QueryBuilder; @@ -1234,13 +1235,19 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.size(source.size()); Map innerHitBuilders = new HashMap<>(); QueryBuilder query = source.query(); + InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext( + context.getSearchExecutionContext().getParserConfig(), + context::getRelativeTimeInMillis + ); if (query != null) { - InnerHitContextBuilder.extractInnerHits(query, innerHitBuilders); + QueryBuilder rewrittenForInnerHits = Rewriteable.rewrite(query, innerHitsRewriteContext, true); + InnerHitContextBuilder.extractInnerHits(rewrittenForInnerHits, innerHitBuilders); searchExecutionContext.setAliasFilter(context.request().getAliasFilter().getQueryBuilder()); context.parsedQuery(searchExecutionContext.toQuery(query)); } if (source.postFilter() != null) { - InnerHitContextBuilder.extractInnerHits(source.postFilter(), innerHitBuilders); + QueryBuilder rewrittenForInnerHits = Rewriteable.rewrite(source.postFilter(), innerHitsRewriteContext, true); + InnerHitContextBuilder.extractInnerHits(rewrittenForInnerHits, innerHitBuilders); context.parsedPostFilter(searchExecutionContext.toQuery(source.postFilter())); } if (innerHitBuilders.size() > 0) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 449326b1d69bb..41b40755dc6e1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import 
org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; @@ -232,6 +233,14 @@ public int hashCode() { this.downsampledResultsOffset = downsampledResultsOffset; } + boolean versionSupportsDownsamplingTimezone(TransportVersion version) { + return version.onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ) + || version.between( + TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH, + TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED + ); + } + /** * Stream from a stream. */ @@ -247,7 +256,7 @@ public InternalDateHistogram(StreamInput in) throws IOException { offset = in.readLong(); format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ)) { + if (versionSupportsDownsamplingTimezone(in.getTransportVersion())) { downsampledResultsOffset = in.readBoolean(); } else { downsampledResultsOffset = false; @@ -265,7 +274,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeLong(offset); out.writeNamedWriteable(format); out.writeBoolean(keyed); - if (out.getTransportVersion().onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ)) { + if (versionSupportsDownsamplingTimezone(out.getTransportVersion())) { out.writeBoolean(downsampledResultsOffset); } out.writeCollection(buckets); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 77cb482edd8b4..fd637e14581ca 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -52,7 +52,7 @@ public InternalTopHits( this.from = from; this.size = size; this.topDocs = topDocs; - this.searchHits = searchHits; + this.searchHits = 
searchHits.asUnpooled(); } /** @@ -63,7 +63,7 @@ public InternalTopHits(StreamInput in) throws IOException { from = in.readVInt(); size = in.readVInt(); topDocs = Lucene.readTopDocs(in); - searchHits = new SearchHits(in); + searchHits = SearchHits.readFrom(in, false); } @Override @@ -152,8 +152,9 @@ private static SearchHits extractSearchHits( position = tracker[shardIndex]++; } while (topDocsForShard.scoreDocs[position] != scoreDoc); hits[i] = ((InternalTopHits) aggregations.get(shardIndex)).searchHits.getAt(position); + assert hits[i].isPooled() == false; } - return new SearchHits(hits, reducedTopDocs.totalHits, maxScore); + return SearchHits.unpooled(hits, reducedTopDocs.totalHits, maxScore); } private static float reduceAndFindMaxScore(List aggregations, TopDocs[] shardDocs) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java index 8f5d3c1b9f322..c3816bef6f0aa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java @@ -64,7 +64,7 @@ public MedianAbsoluteDeviationAggregationBuilder(String name) { public MedianAbsoluteDeviationAggregationBuilder(StreamInput in) throws IOException { super(in); compression = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { executionHint = in.readOptionalWriteable(TDigestExecutionHint::readFrom); } else { executionHint = TDigestExecutionHint.HIGH_ACCURACY; @@ -120,7 +120,7 @@ protected ValuesSourceType defaultValueSourceType() { @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDouble(compression); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(executionHint); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java index d946ce3e14ea1..fedae36be0263 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesConfig.java @@ -130,7 +130,7 @@ public TDigest(double compression, TDigestExecutionHint executionHint) { TDigest(StreamInput in) throws IOException { this( in.readDouble(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(TDigestExecutionHint::readFrom) : TDigestExecutionHint.HIGH_ACCURACY ); @@ -235,7 +235,7 @@ public InternalNumericMetricsAggregation.MultiValue createEmptyPercentileRanksAg public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeDouble(compression); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(executionHint); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java index 23c26794f6bb5..0d0ed21556a92 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java @@ -107,7 +107,7 @@ public final double compression() { public static void write(TDigestState state, StreamOutput out) throws IOException { 
out.writeDouble(state.compression); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeString(state.type.toString()); out.writeVLong(state.tdigest.size()); } @@ -123,7 +123,7 @@ public static TDigestState read(StreamInput in) throws IOException { double compression = in.readDouble(); TDigestState state; long size = 0; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { state = new TDigestState(Type.valueOf(in.readString()), compression); size = in.readVLong(); } else { diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 959f75c6a5590..beb6aa4f8385b 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -222,7 +222,7 @@ public SearchSourceBuilder(StreamInput in) throws IOException { indexBoosts = in.readCollectionAsList(IndexBoost::new); minScore = in.readOptionalFloat(); postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { subSearchSourceBuilders = in.readCollectionAsList(SubSearchSourceBuilder::new); } else { QueryBuilder queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); @@ -292,7 +292,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(indexBoosts); out.writeOptionalFloat(minScore); out.writeOptionalNamedWriteable(postQueryBuilder); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { 
out.writeCollection(subSearchSourceBuilders); } else if (out.getTransportVersion().before(TransportVersions.V_8_4_0) && subSearchSourceBuilders.size() >= 2) { throw new IllegalArgumentException("cannot serialize [sub_searches] to version [" + out.getTransportVersion() + "]"); diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index 5d3288408c99b..dab127e8b4e56 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -42,6 +42,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.index.query.AbstractQueryBuilder.DEFAULT_BOOST; + /** * DFS phase of a search request, used to make scoring 100% accurate by collecting additional info from each shard before the query phase. * The additional information is used to better compare the scores coming from all the shards, which depend on local factors (e.g. idf). 
@@ -181,6 +183,8 @@ private static void executeKnnVectorQuery(SearchContext context) throws IOExcept SearchExecutionContext searchExecutionContext = context.getSearchExecutionContext(); List knnSearch = context.request().source().knnSearch(); List knnVectorQueryBuilders = knnSearch.stream().map(KnnSearchBuilder::toQueryBuilder).toList(); + // Since we apply boost during the DfsQueryPhase, we should not apply boost here: + knnVectorQueryBuilders.forEach(knnVectorQueryBuilder -> knnVectorQueryBuilder.boost(DEFAULT_BOOST)); if (context.request().getAliasFilter().getQueryBuilder() != null) { for (KnnVectorQueryBuilder knnVectorQueryBuilder : knnVectorQueryBuilders) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 91e4fb791f62d..c106d9b6f4cb2 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -82,6 +82,7 @@ public void execute(SearchContext context, int[] docIdsToLoad) { // Only set the shardResults if building search hits was successful if (hits != null) { context.fetchResult().shardResult(hits, profileResult); + hits.decRef(); } } } @@ -173,7 +174,7 @@ protected SearchHit nextDoc(int doc) throws IOException { } TotalHits totalHits = context.getTotalHits(); - return new SearchHits(hits, totalHits, context.getMaxScore()); + return SearchHits.unpooled(hits, totalHits, context.getMaxScore()); } List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { @@ -247,11 +248,12 @@ private static HitContext prepareNonNestedHitContext( String id = idLoader.getId(subDocId); if (id == null) { - SearchHit hit = new SearchHit(docId, null); + // TODO: can we use pooled buffers here as well? 
+ SearchHit hit = SearchHit.unpooled(docId, null); Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); return new HitContext(hit, subReaderContext, subDocId, Map.of(), source); } else { - SearchHit hit = new SearchHit(docId, id); + SearchHit hit = SearchHit.unpooled(docId, id); Source source; if (requiresSource) { Timer timer = profiler.startLoadingSource(); @@ -328,7 +330,7 @@ private static HitContext prepareNestedHitContext( assert nestedIdentity != null; Source nestedSource = nestedIdentity.extractSource(rootSource); - SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity); + SearchHit hit = SearchHit.unpooled(topDocId, rootId, nestedIdentity); return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index ea5ab13c2e8ee..cc39113f2009f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -70,6 +70,11 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde searchHits[docs[i].index] = nextDoc(docs[i].docId); } } catch (Exception e) { + for (SearchHit searchHit : searchHits) { + if (searchHit != null) { + searchHit.decRef(); + } + } throw new FetchPhaseExecutionException(shardTarget, "Error running fetch phase for doc [" + currentDoc + "]", e); } return searchHits; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index aa5c1f2cbd992..6cf924a239208 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -30,7 
+30,12 @@ public final class FetchSearchResult extends SearchPhaseResult { private ProfileResult profileResult; - private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> hits = null)); + private final RefCounted refCounted = LeakTracker.wrap(AbstractRefCounted.of(() -> { + if (hits != null) { + hits.decRef(); + hits = null; + } + })); public FetchSearchResult() {} @@ -42,12 +47,13 @@ public FetchSearchResult(ShardSearchContextId id, SearchShardTarget shardTarget) public FetchSearchResult(StreamInput in) throws IOException { super(in); contextId = new ShardSearchContextId(in); - hits = new SearchHits(in); + hits = SearchHits.readFrom(in, true); profileResult = in.readOptionalWriteable(ProfileResult::new); } @Override public void writeTo(StreamOutput out) throws IOException { + assert hasReferences(); contextId.writeTo(out); hits.writeTo(out); out.writeOptionalWriteable(profileResult); @@ -61,6 +67,7 @@ public FetchSearchResult fetchResult() { public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); this.hits = hits; + hits.incRef(); assert this.profileResult == null; this.profileResult = profileResult; } @@ -73,6 +80,7 @@ private static boolean assertNoSearchTarget(SearchHits hits) { } public SearchHits hits() { + assert hasReferences(); return hits; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index feb0547a32536..ccb54801472a6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -103,7 +103,9 @@ private void hitExecute(Map innerHi searchHitFields.sortValues(fieldDoc.fields, innerHitsContext.sort().formats); } } - results.put(entry.getKey(), fetchResult.hits()); + var h = fetchResult.hits(); + results.put(entry.getKey(), h); + 
h.mustIncRef(); } } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 18ae708d8fec3..2023ee2e8d4b6 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -284,8 +284,7 @@ public ShardSearchRequest(StreamInput in) throws IOException { numberOfShards = in.readVInt(); scroll = in.readOptionalWriteable(Scroll::new); source = in.readOptionalWriteable(SearchSourceBuilder::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) - && in.getTransportVersion().before(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) && in.getTransportVersion().before(TransportVersions.V_8_9_X)) { // to deserialize between the 8.8 and 8.500.020 version we need to translate // the rank queries into sub searches if we are ranking; if there are no rank queries // we deserialize the empty list and do nothing @@ -360,8 +359,7 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce } out.writeOptionalWriteable(scroll); out.writeOptionalWriteable(source); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) - && out.getTransportVersion().before(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0) && out.getTransportVersion().before(TransportVersions.V_8_9_X)) { // to serialize between the 8.8 and 8.500.020 version we need to translate // the sub searches into rank queries if we are ranking, otherwise, we // ignore this because linear combination will have multiple sub searches in diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java index 76ee7e09ad870..4c42daba22b7a 100644 --- 
a/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorerBuilder.java @@ -73,6 +73,8 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum RescorerBuilder rescorer = null; Integer windowSize = null; XContentParser.Token token; + String rescorerType = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); @@ -83,8 +85,11 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); } } else if (token == XContentParser.Token.START_OBJECT) { - rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null); - rescorerNameConsumer.accept(fieldName); + if (fieldName != null) { + rescorer = parser.namedObject(RescorerBuilder.class, fieldName, null); + rescorerNameConsumer.accept(fieldName); + rescorerType = fieldName; + } } else { throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } @@ -92,9 +97,13 @@ public static RescorerBuilder parseFromXContent(XContentParser parser, Consum if (rescorer == null) { throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); } + if (windowSize != null) { rescorer.windowSize(windowSize.intValue()); + } else if (rescorer.isWindowSizeRequired()) { + throw new ParsingException(parser.getTokenLocation(), "window_size is required for rescorer of type [" + rescorerType + "]"); } + return rescorer; } @@ -111,11 +120,21 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; + /** + * Indicate if the window_size is a required parameter for the rescorer. 
+ */ + protected boolean isWindowSizeRequired() { + return false; + } + /** * Build the {@linkplain RescoreContext} that will be used to actually * execute the rescore against a particular shard. */ public final RescoreContext buildContext(SearchExecutionContext context) throws IOException { + if (isWindowSizeRequired()) { + assert windowSize != null; + } int finalWindowSize = windowSize == null ? DEFAULT_WINDOW_SIZE : windowSize; RescoreContext rescoreContext = innerBuildContext(finalWindowSize, context); return rescoreContext; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java index 40ff9c6eaf6ee..7210c35d961ac 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestion.java @@ -271,7 +271,9 @@ public Option(int docID, Text text, float score, Map> contex public Option(StreamInput in) throws IOException { super(in); this.doc = Lucene.readScoreDoc(in); - this.hit = in.readOptionalWriteable(SearchHit::readFrom); + if (in.readBoolean()) { + this.hit = SearchHit.readFrom(in, false); + } int contextSize = in.readInt(); this.contexts = Maps.newLinkedHashMapWithExpectedSize(contextSize); for (int i = 0; i < contextSize; i++) { @@ -309,7 +311,7 @@ public void setShardIndex(int shardIndex) { } public void setHit(SearchHit hit) { - this.hit = hit; + this.hit = hit == null ? null : hit.asUnpooled(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java new file mode 100644 index 0000000000000..d292f61dcb085 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.vectors; + +import org.apache.lucene.search.Query; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +/** + * Exact knn query builder. Will iterate and score all documents that have the provided knn field in the index. + * Useful in inner hits scoring scenarios. + */ +public class ExactKnnQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "exact_knn"; + private final String field; + private final float[] query; + + /** + * Creates a query builder. 
+ * + * @param query the query vector + * @param field the field that was used for the kNN query + */ + public ExactKnnQueryBuilder(float[] query, String field) { + this.query = query; + this.field = field; + } + + public ExactKnnQueryBuilder(StreamInput in) throws IOException { + super(in); + this.query = in.readFloatArray(); + this.field = in.readString(); + } + + String getField() { + return field; + } + + float[] getQuery() { + return query; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeFloatArray(query); + out.writeString(field); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.field("query", query); + builder.field("field", field); + boostAndQueryNameToXContent(builder); + builder.endObject(); + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + final MappedFieldType fieldType = context.getFieldType(field); + if (fieldType == null) { + throw new IllegalArgumentException("field [" + field + "] does not exist in the mapping"); + } + if (fieldType instanceof DenseVectorFieldMapper.DenseVectorFieldType == false) { + throw new IllegalArgumentException( + "[" + NAME + "] queries are only supported on [" + DenseVectorFieldMapper.CONTENT_TYPE + "] fields" + ); + } + final DenseVectorFieldMapper.DenseVectorFieldType vectorFieldType = (DenseVectorFieldMapper.DenseVectorFieldType) fieldType; + return vectorFieldType.createExactKnnQuery(query); + } + + @Override + protected boolean doEquals(ExactKnnQueryBuilder other) { + return field.equals(other.field) && Arrays.equals(query, other.query); + } + + @Override + protected int doHashCode() { + return Objects.hash(field, Arrays.hashCode(query)); + } + + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + 
return this; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.NESTED_KNN_MORE_INNER_HITS; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index 13ca1d3dc1db2..ea9b2df942808 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -35,6 +35,8 @@ public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "knn_score_doc"; private final ScoreDoc[] scoreDocs; + private final String fieldName; + private final float[] queryVector; /** * Creates a query builder. @@ -42,13 +44,26 @@ public class KnnScoreDocQueryBuilder extends AbstractQueryBuilder rewrittenQueries = new ArrayList<>(filterQueries.size()); for (QueryBuilder query : filterQueries) { @@ -260,6 +263,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException { DenseVectorFieldType vectorFieldType = (DenseVectorFieldType) fieldType; String parentPath = context.nestedLookup().getNestedParent(fieldName); + if (parentPath != null) { NestedObjectMapper originalObjectMapper = context.nestedScope().getObjectMapper(); if (originalObjectMapper != null) { diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java index 377c7b3847b0b..d1efb51e36856 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java @@ -67,7 +67,7 @@ public static TaskInfo from(StreamInput in) throws IOException { return new TaskInfo( taskId, in.readString(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) ? 
in.readString() : taskId.getNodeId(), + in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? in.readString() : taskId.getNodeId(), in.readString(), in.readOptionalString(), in.readOptionalNamedWriteable(Task.Status.class), @@ -84,7 +84,7 @@ public static TaskInfo from(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { taskId.writeTo(out); out.writeString(type); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(node); } out.writeString(action); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java index fd5c39ec5fb1f..9e68557c05de6 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java @@ -39,7 +39,7 @@ */ public class RemoteClusterPortSettings { - public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_500_061; + public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_10_X; public static final String REMOTE_CLUSTER_PROFILE = "_remote_cluster"; public static final String REMOTE_CLUSTER_PREFIX = "remote_cluster."; diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json index d8b4ed1ff93c9..46e32300e70fd 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.json @@ -29,5 +29,6 @@ "BOOTSTRAP_CHECK_ROLE_MAPPINGS": "bootstrap-checks-xpack.html#_role_mappings_check", "BOOTSTRAP_CHECK_TLS": 
"bootstrap-checks-xpack.html#bootstrap-checks-tls", "BOOTSTRAP_CHECK_TOKEN_SSL": "bootstrap-checks-xpack.html#_token_ssl_check", - "BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP": "security-minimal-setup.html" + "BOOTSTRAP_CHECK_SECURITY_MINIMAL_SETUP": "security-minimal-setup.html", + "CONTACT_SUPPORT": "troubleshooting.html#troubleshooting-contact-support" } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java index 38c811d367560..9c7fa266a0762 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/coordination/ClusterFormationInfoActionTests.java @@ -69,6 +69,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA clusterFormationState.lastCommittedConfiguration(), clusterFormationState.resolvedAddresses(), clusterFormationState.foundPeers(), + clusterFormationState.mastersOfPeers(), clusterFormationState.currentTerm(), clusterFormationState.hasDiscoveredQuorum(), clusterFormationState.statusInfo(), @@ -88,6 +89,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA clusterFormationState.lastCommittedConfiguration(), clusterFormationState.resolvedAddresses(), clusterFormationState.foundPeers(), + clusterFormationState.mastersOfPeers(), clusterFormationState.currentTerm(), clusterFormationState.hasDiscoveredQuorum(), clusterFormationState.statusInfo(), @@ -107,6 +109,7 @@ private ClusterFormationInfoAction.Response mutateResponse(ClusterFormationInfoA clusterFormationState.lastCommittedConfiguration(), clusterFormationState.resolvedAddresses(), clusterFormationState.foundPeers(), + clusterFormationState.mastersOfPeers(), clusterFormationState.currentTerm(), clusterFormationState.hasDiscoveredQuorum() 
== false, clusterFormationState.statusInfo(), @@ -148,6 +151,7 @@ private ClusterFormationFailureHelper.ClusterFormationState getClusterFormationS new CoordinationMetadata.VotingConfiguration(Collections.emptySet()), Collections.emptyList(), Collections.emptyList(), + Collections.emptySet(), randomLong(), randomBoolean(), new StatusInfo(randomFrom(StatusInfo.Status.HEALTHY, StatusInfo.Status.UNHEALTHY), randomAlphaOfLength(20)), diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 345f85470a056..adefd71f93590 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionTestUtils; @@ -289,7 +288,7 @@ public void onFailure(Exception e) { request.setReason("Testing Cancellation"); request.setTargetTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId())); // And send the cancellation request to a random node - CancelTasksResponse response = ActionTestUtils.executeBlocking( + ListTasksResponse response = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction, request ); @@ -368,7 +367,7 @@ public void onFailure(Exception e) { request.setReason("Testing Cancellation"); 
request.setTargetParentTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId())); // And send the cancellation request to a random node - CancelTasksResponse response = ActionTestUtils.executeBlocking( + ListTasksResponse response = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(1, testNodes.length - 1)].transportCancelTasksAction, request ); @@ -487,7 +486,7 @@ public void onFailure(Exception e) { request.setReason("Testing Cancellation"); request.setTargetTaskId(new TaskId(testNodes[0].getNodeId(), mainTask.getId())); // And send the cancellation request to a random node - CancelTasksResponse response = ActionTestUtils.executeBlocking(testNodes[0].transportCancelTasksAction, request); + ListTasksResponse response = ActionTestUtils.executeBlocking(testNodes[0].transportCancelTasksAction, request); logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster"); // This node still thinks that's part of the cluster, so cancelling should look successful assertThat(response.getTasks().size(), lessThanOrEqualTo(1)); @@ -544,7 +543,7 @@ public void testNonExistingTaskCancellation() throws Exception { randomSubsetOf(randomIntBetween(1, testNodes.length - 1), testNodes).stream().map(TestNode::getNodeId).toArray(String[]::new) ); // And send the cancellation request to a random node - CancelTasksResponse response = ActionTestUtils.executeBlocking( + ListTasksResponse response = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(1, testNodes.length - 1)].transportCancelTasksAction, request ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 86ccd9807cf9f..7168b2c1edcdd 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup; @@ -521,7 +520,7 @@ public void testCancellingTasksThatDontSupportCancellation() throws Exception { request.setNodes(testNodes[0].getNodeId()); request.setReason("Testing Cancellation"); request.setActions(actionName); - CancelTasksResponse response = ActionTestUtils.executeBlocking( + ListTasksResponse response = ActionTestUtils.executeBlocking( testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction, request ); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java deleted file mode 100644 index 4f3b14cd986c1..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheResponseTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.cache.clear; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.test.AbstractBroadcastResponseTestCase; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; - -public class ClearIndicesCacheResponseTests extends AbstractBroadcastResponseTestCase { - - @Override - protected ClearIndicesCacheResponse createTestInstance( - int totalShards, - int successfulShards, - int failedShards, - List failures - ) { - return new ClearIndicesCacheResponse(totalShards, successfulShards, failedShards, failures); - } - - @Override - protected ClearIndicesCacheResponse doParseInstance(XContentParser parser) { - return ClearIndicesCacheResponse.fromXContent(parser); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java deleted file mode 100644 index 62611060ce25d..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/flush/FlushResponseTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.flush; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.test.AbstractBroadcastResponseTestCase; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; - -public class FlushResponseTests extends AbstractBroadcastResponseTestCase { - - @Override - protected FlushResponse createTestInstance( - int totalShards, - int successfulShards, - int failedShards, - List failures - ) { - return new FlushResponse(totalShards, successfulShards, failedShards, failures); - } - - @Override - protected FlushResponse doParseInstance(XContentParser parser) { - return FlushResponse.fromXContent(parser); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java deleted file mode 100644 index ed1160edeb8f5..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeResponseTests.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.forcemerge; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.test.AbstractBroadcastResponseTestCase; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; - -public class ForceMergeResponseTests extends AbstractBroadcastResponseTestCase { - @Override - protected ForceMergeResponse createTestInstance( - int totalShards, - int successfulShards, - int failedShards, - List failures - ) { - return new ForceMergeResponse(totalShards, successfulShards, failedShards, failures); - } - - @Override - protected ForceMergeResponse doParseInstance(XContentParser parser) { - return ForceMergeResponse.fromXContent(parser); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java deleted file mode 100644 index 5a3183b3e61b9..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/refresh/RefreshResponseTests.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.refresh; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.test.AbstractBroadcastResponseTestCase; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; - -public class RefreshResponseTests extends AbstractBroadcastResponseTestCase { - - @Override - protected RefreshResponse createTestInstance( - int totalShards, - int successfulShards, - int failedShards, - List failures - ) { - return new RefreshResponse(totalShards, successfulShards, failedShards, failures); - } - - @Override - protected RefreshResponse doParseInstance(XContentParser parser) { - return RefreshResponse.fromXContent(parser); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java deleted file mode 100644 index b9f3e8b89a214..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeResponseTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.admin.indices.shrink; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractXContentSerializingTestCase; -import org.elasticsearch.xcontent.XContentParser; - -public class ResizeResponseTests extends AbstractXContentSerializingTestCase { - - public void testToXContent() { - ResizeResponse response = new ResizeResponse(true, false, "index_name"); - String output = Strings.toString(response); - assertEquals(""" - {"acknowledged":true,"shards_acknowledged":false,"index":"index_name"}""", output); - } - - @Override - protected ResizeResponse doParseInstance(XContentParser parser) { - return ResizeResponse.fromXContent(parser); - } - - @Override - protected ResizeResponse createTestInstance() { - boolean acknowledged = randomBoolean(); - boolean shardsAcknowledged = acknowledged && randomBoolean(); - String index = randomAlphaOfLength(5); - return new ResizeResponse(acknowledged, shardsAcknowledged, index); - } - - @Override - protected Writeable.Reader instanceReader() { - return ResizeResponse::new; - } - - @Override - protected ResizeResponse mutateInstance(ResizeResponse response) { - if (randomBoolean()) { - if (randomBoolean()) { - boolean acknowledged = response.isAcknowledged() == false; - boolean shardsAcknowledged = acknowledged && response.isShardsAcknowledged(); - return new ResizeResponse(acknowledged, shardsAcknowledged, response.index()); - } else { - boolean shardsAcknowledged = response.isShardsAcknowledged() == false; - boolean acknowledged = shardsAcknowledged || response.isAcknowledged(); - return new ResizeResponse(acknowledged, shardsAcknowledged, response.index()); - } - } else { - return new ResizeResponse( - response.isAcknowledged(), - response.isShardsAcknowledged(), - response.index() + randomAlphaOfLengthBetween(2, 5) - ); - } - } -} diff --git 
a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 1276f6c2db58b..75833052dd4c8 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -143,8 +143,7 @@ void createIndex(String index, TimeValue timeout, ActionListener(10); - try { + try (var result = new ArraySearchPhaseResults<>(10)) { AbstractSearchAsyncAction action = createAction(new SearchRequest(), result, null, controlled, expected); final long actual = action.buildTookInMillis(); if (controlled) { @@ -145,16 +144,13 @@ private void runTestTook(final boolean controlled) { // with a real clock, the best we can say is that it took as long as we spun for assertThat(actual, greaterThanOrEqualTo(TimeUnit.NANOSECONDS.toMillis(expected.get()))); } - } finally { - result.decRef(); } } public void testBuildShardSearchTransportRequest() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(randomBoolean()); final AtomicLong expected = new AtomicLong(); - var result = new ArraySearchPhaseResults<>(10); - try { + try (var result = new ArraySearchPhaseResults<>(10)) { AbstractSearchAsyncAction action = createAction(searchRequest, result, null, false, expected); String clusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); SearchShardIterator iterator = new SearchShardIterator( @@ -170,8 +166,6 @@ public void testBuildShardSearchTransportRequest() { assertEquals(2.0f, shardSearchTransportRequest.indexBoost(), 0.0f); assertArrayEquals(new String[] { "name", "name1" }, shardSearchTransportRequest.indices()); assertEquals(clusterAlias, shardSearchTransportRequest.getClusterAlias()); - } finally { - result.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index 33e6096bab763..4a7d0cc8208e2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -41,8 +41,9 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { ); searchProgressListener.notifyListShards(searchShards, Collections.emptyList(), SearchResponse.Clusters.EMPTY, false, timeProvider); - CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(searchProgressListener, 10); - try { + try ( + CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(searchProgressListener, 10) + ) { AtomicInteger nextCounter = new AtomicInteger(0); for (int i = 0; i < 10; i++) { SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), null); @@ -58,14 +59,16 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { queryPhaseResultConsumer.reduce(); assertEquals(1, searchProgressListener.onFinalReduce.get()); assertEquals(10, nextCounter.get()); - } finally { - queryPhaseResultConsumer.decRef(); } } public void testNullShardResultHandling() throws Exception { - CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer 
= new CountOnlyQueryPhaseResultConsumer(SearchProgressListener.NOOP, 10); - try { + try ( + CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer( + SearchProgressListener.NOOP, + 10 + ) + ) { AtomicInteger nextCounter = new AtomicInteger(0); for (int i = 0; i < 10; i++) { SearchShardTarget searchShardTarget = new SearchShardTarget("node", new ShardId("index", "uuid", i), null); @@ -79,20 +82,20 @@ public void testNullShardResultHandling() throws Exception { assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); - } finally { - queryPhaseResultConsumer.decRef(); } } public void testEmptyResults() throws Exception { - CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer(SearchProgressListener.NOOP, 10); - try { + try ( + CountOnlyQueryPhaseResultConsumer queryPhaseResultConsumer = new CountOnlyQueryPhaseResultConsumer( + SearchProgressListener.NOOP, + 10 + ) + ) { var reducePhase = queryPhaseResultConsumer.reduce(); assertEquals(0, reducePhase.totalHits().value); assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); assertTrue(reducePhase.isEmptyResult()); - } finally { - queryPhaseResultConsumer.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java index 838e13d6026c7..bc31f5f92f9b5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java @@ -23,8 +23,7 @@ public class CountedCollectorTests extends ESTestCase { public void testCollect() throws InterruptedException { - ArraySearchPhaseResults consumer = new ArraySearchPhaseResults<>(randomIntBetween(1, 100)); - try { + try (ArraySearchPhaseResults 
consumer = new ArraySearchPhaseResults<>(randomIntBetween(1, 100))) { List state = new ArrayList<>(); int numResultsExpected = randomIntBetween(1, consumer.getAtomicArray().length()); MockSearchPhaseContext context = new MockSearchPhaseContext(consumer.getAtomicArray().length()); @@ -93,8 +92,6 @@ public void testCollect() throws InterruptedException { for (int i = numResultsExpected; i < results.length(); i++) { assertNull("index: " + i, results.get(i)); } - } finally { - consumer.decRef(); } } } diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index b14d24cf95f62..e9ff8336ef4c9 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -125,16 +125,17 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.searchRequest, - results.length(), - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.searchRequest, + results.length(), + exc -> {} + ) + ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { @@ -155,8 +156,6 @@ public void run() throws IOException { assertEquals(84, 
responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); - } finally { - consumer.decRef(); } } @@ -211,16 +210,17 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.searchRequest, - results.length(), - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.searchRequest, + results.length(), + exc -> {} + ) + ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { @@ -243,8 +243,6 @@ public void run() throws IOException { assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(new ShardSearchContextId("", 2L))); assertNull(responseRef.get().get(1)); - } finally { - consumer.decRef(); } } @@ -299,16 +297,17 @@ public void sendExecuteQuery( SearchPhaseController searchPhaseController = searchPhaseController(); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new 
NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.searchRequest, - results.length(), - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.searchRequest, + results.length(), + exc -> {} + ) + ) { DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, null, consumer, (response) -> new SearchPhase("test") { @Override public void run() throws IOException { @@ -320,8 +319,6 @@ public void run() throws IOException { assertThat(mockSearchPhaseContext.failures, hasSize(1)); assertThat(mockSearchPhaseContext.failures.get(0).getCause(), instanceOf(UncheckedIOException.class)); assertThat(mockSearchPhaseContext.releasedSearchContexts, hasSize(1)); // phase execution will clean up on the contexts - } finally { - consumer.decRef(); } } @@ -339,15 +336,28 @@ public void testRewriteShardSearchRequestWithRank() { QueryBuilder bm25 = new TermQueryBuilder("field", "term"); SearchSourceBuilder ssb = new SearchSourceBuilder().query(bm25) - .knnSearch(List.of(new KnnSearchBuilder("vector", new float[] { 0.0f }, 10, 100, null))) + .knnSearch( + List.of( + new KnnSearchBuilder("vector", new float[] { 0.0f }, 10, 100, null), + new KnnSearchBuilder("vector2", new float[] { 0.0f }, 10, 100, null) + ) + ) .rankBuilder(new TestRankBuilder(100)); SearchRequest sr = new SearchRequest().allowPartialSearchResults(true).source(ssb); ShardSearchRequest ssr = new ShardSearchRequest(null, sr, new ShardId("test", "testuuid", 1), 1, 1, null, 1.0f, 0, null); dqp.rewriteShardSearchRequest(ssr); - KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) }); - KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new 
ScoreDoc(1, 2.0f, 1) }); + KnnScoreDocQueryBuilder ksdqb0 = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(1, 3.0f, 1), new ScoreDoc(4, 1.5f, 1) }, + "vector", + new float[] { 0.0f } + ); + KnnScoreDocQueryBuilder ksdqb1 = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(1, 2.0f, 1) }, + "vector2", + new float[] { 0.0f } + ); assertEquals( List.of(bm25, ksdqb0, ksdqb1), List.of( diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 648cb8aa60158..0a98b12444f9c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -42,8 +42,8 @@ public void testCollapseSingleHit() throws IOException { final int numInnerHits = randomIntBetween(1, 5); List collapsedHits = new ArrayList<>(numInnerHits); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - SearchHits hits = new SearchHits( - new SearchHit[] { new SearchHit(innerHitNum, "ID"), new SearchHit(innerHitNum + 1, "ID") }, + SearchHits hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(innerHitNum, "ID"), SearchHit.unpooled(innerHitNum + 1, "ID") }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -98,6 +98,8 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL sections.decRef(); } mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null)); + // transferring ownership to the multi-search response so no need to release here + mockSearchPhaseContext.searchResponse.set(null); } ActionListener.respondAndRelease( @@ -110,37 +112,43 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL SearchHit hit = new SearchHit(1, "ID"); hit.setDocumentField("someField", new DocumentField("someField", 
Collections.singletonList(collapseValue))); SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); - try { - mockSearchPhaseContext.sendSearchResponse(sections, null); - } finally { - sections.decRef(); + try { + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } - } - }); + }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get(); - assertNotNull(theResponse); - assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size()); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + SearchResponse theResponse = mockSearchPhaseContext.searchResponse.get(); + assertNotNull(theResponse); + assertEquals(numInnerHits, theResponse.getHits().getHits()[0].getInnerHits().size()); - for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { - assertSame( - theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum), - collapsedHits.get(innerHitNum) - ); - } + for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { + assertSame( + theResponse.getHits().getHits()[0].getInnerHits().get("innerHit" + innerHitNum), + collapsedHits.get(innerHitNum) + ); + } - assertTrue(executedMultiSearch.get()); + assertTrue(executedMultiSearch.get()); + } finally { + hits.decRef(); + } } finally { + mockSearchPhaseContext.execute(() -> {}); var resp = 
mockSearchPhaseContext.searchResponse.get(); if (resp != null) { resp.decRef(); } + } } } @@ -198,22 +206,28 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL SearchHit hit2 = new SearchHit(2, "ID2"); hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue))); SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); - try { - mockSearchPhaseContext.sendSearchResponse(sections, null); - } finally { - sections.decRef(); + try { + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } - } - }); - phase.run(); - assertThat(mockSearchPhaseContext.phaseFailure.get(), Matchers.instanceOf(RuntimeException.class)); - assertEquals("boom", mockSearchPhaseContext.phaseFailure.get().getMessage()); - assertNotNull(mockSearchPhaseContext.phaseFailure.get()); - assertNull(mockSearchPhaseContext.searchResponse.get()); + }); + phase.run(); + assertThat(mockSearchPhaseContext.phaseFailure.get(), Matchers.instanceOf(RuntimeException.class)); + assertEquals("boom", mockSearchPhaseContext.phaseFailure.get().getMessage()); + assertNotNull(mockSearchPhaseContext.phaseFailure.get()); + assertNull(mockSearchPhaseContext.searchResponse.get()); + } finally { + mockSearchPhaseContext.execute(() -> {}); + hits.decRef(); + collapsedHits.decRef(); + } } public void testSkipPhase() throws IOException { @@ -231,21 +245,26 @@ void 
sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL SearchHit hit2 = new SearchHit(2, "ID2"); hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null))); SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); - ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { - @Override - public void run() { - var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); - try { - mockSearchPhaseContext.sendSearchResponse(sections, null); - } finally { - sections.decRef(); + try { + ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") { + @Override + public void run() { + var sections = new SearchResponseSections(hits, null, null, false, null, null, 1); + try { + mockSearchPhaseContext.sendSearchResponse(sections, null); + } finally { + sections.decRef(); + } } - } - }); - phase.run(); - mockSearchPhaseContext.assertNoFailure(); - assertNotNull(mockSearchPhaseContext.searchResponse.get()); + }); + phase.run(); + mockSearchPhaseContext.assertNoFailure(); + assertNotNull(mockSearchPhaseContext.searchResponse.get()); + } finally { + hits.decRef(); + } } finally { + mockSearchPhaseContext.execute(() -> {}); var resp = mockSearchPhaseContext.searchResponse.get(); if (resp != null) { resp.decRef(); @@ -328,6 +347,7 @@ public void run() { phase.run(); mockSearchPhaseContext.assertNoFailure(); assertNotNull(mockSearchPhaseContext.searchResponse.get()); + mockSearchPhaseContext.execute(() -> {}); } finally { var resp = mockSearchPhaseContext.searchResponse.get(); if (resp != null) { diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java index 035d01108d655..95a4efcca5fa2 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java @@ -52,9 +52,11 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL phase.run(); } finally { sections.decRef(); + hits.decRef(); } searchPhaseContext.assertNoFailure(); assertNotNull(searchPhaseContext.searchResponse.get()); + searchPhaseContext.execute(() -> {}); } finally { var resp = searchPhaseContext.searchResponse.get(); if (resp != null) { @@ -126,6 +128,7 @@ void sendExecuteMultiSearch( ), null ); + searchHits.decRef(); } ActionListener.respondAndRelease(listener, new MultiSearchResponse(responses, randomNonNegativeLong())); } @@ -192,6 +195,7 @@ void sendExecuteMultiSearch( phase.run(); } finally { sections.decRef(); + searchHits.decRef(); } assertTrue(requestSent.get()); searchPhaseContext.assertNoFailure(); @@ -220,6 +224,7 @@ void sendExecuteMultiSearch( leftHit1.field("lookup_field_3").getValues(), contains(Map.of("field_a", List.of("a2"), "field_b", List.of("b1", "b2"))) ); + searchPhaseContext.execute(() -> {}); } finally { var resp = searchPhaseContext.searchResponse.get(); if (resp != null) { diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 4594810da575a..a2c5bed51f5e7 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -50,16 +50,17 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() { SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); - SearchPhaseResults results = 
controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - 1, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + 1, + exc -> {} + ) + ) { boolean hasHits = randomBoolean(); boolean profiled = hasHits && randomBoolean(); final int numHits; @@ -78,8 +79,8 @@ public void testShortcutQueryAndFetchOptimization() { FetchSearchResult fetchResult = new FetchSearchResult(); try { fetchResult.setSearchShardTarget(queryResult.getSearchShardTarget()); - SearchHits hits = new SearchHits( - new SearchHit[] { new SearchHit(42) }, + SearchHits hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -126,7 +127,6 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } @@ -144,16 +144,17 @@ private void assertProfiles(boolean profiled, int totalShards, SearchResponse se public void testFetchTwoDocument() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchPhaseResults results = controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - 2, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + 
mockSearchPhaseContext.getRequest(), + 2, + exc -> {} + ) + ) { int resultSetSize = randomIntBetween(2, 10); boolean profiled = randomBoolean(); @@ -209,16 +210,16 @@ public void sendExecuteFetch( SearchHits hits; if (request.contextId().equals(ctx2)) { fetchResult.setSearchShardTarget(shard2Target); - hits = new SearchHits( - new SearchHit[] { new SearchHit(84) }, + hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F ); } else { assertEquals(ctx1, request.contextId()); fetchResult.setSearchShardTarget(shard1Target); - hits = new SearchHits( - new SearchHit[] { new SearchHit(42) }, + hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -258,23 +259,23 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } public void testFailFetchOneDoc() { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchPhaseResults results = controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - 2, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + 2, + exc -> {} + ) + ) { int resultSetSize = randomIntBetween(2, 10); boolean profiled = randomBoolean(); @@ -327,8 +328,8 @@ public void sendExecuteFetch( FetchSearchResult fetchResult = new FetchSearchResult(); try { fetchResult.setSearchShardTarget(shard1Target); - SearchHits hits = new SearchHits( - new SearchHit[] { new SearchHit(84) 
}, + SearchHits hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F ); @@ -386,7 +387,6 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } @@ -397,16 +397,17 @@ public void testFetchDocsConcurrently() throws InterruptedException { boolean profiled = randomBoolean(); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); - SearchPhaseResults results = controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - numHits, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + numHits, + exc -> {} + ) + ) { SearchShardTarget[] shardTargets = new SearchShardTarget[numHits]; for (int i = 0; i < numHits; i++) { shardTargets[i] = new SearchShardTarget("node1", new ShardId("test", "na", i), null); @@ -439,8 +440,8 @@ public void sendExecuteFetch( FetchSearchResult fetchResult = new FetchSearchResult(); try { fetchResult.setSearchShardTarget(shardTargets[(int) request.contextId().getId()]); - SearchHits hits = new SearchHits( - new SearchHit[] { new SearchHit((int) (request.contextId().getId() + 1)) }, + SearchHits hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled((int) (request.contextId().getId() + 1)) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 100F ); @@ -505,23 +506,23 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } public void testExceptionFailsPhase() { MockSearchPhaseContext 
mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchPhaseResults results = controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - 2, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + 2, + exc -> {} + ) + ) { int resultSetSize = randomIntBetween(2, 10); boolean profiled = randomBoolean(); @@ -578,16 +579,16 @@ public void sendExecuteFetch( SearchHits hits; if (request.contextId().getId() == 321) { fetchResult.setSearchShardTarget(shard2Target); - hits = new SearchHits( - new SearchHit[] { new SearchHit(84) }, + hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F ); } else { fetchResult.setSearchShardTarget(shard1Target); assertEquals(request, 123); - hits = new SearchHits( - new SearchHit[] { new SearchHit(42) }, + hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(42) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F ); @@ -620,23 +621,23 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } public void testCleanupIrrelevantContexts() { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); SearchPhaseController controller = new SearchPhaseController((t, s) -> InternalAggregationTestCase.emptyReduceContextBuilder()); - SearchPhaseResults results = controller.newSearchPhaseResults( - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new 
NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - mockSearchPhaseContext.getRequest(), - 2, - exc -> {} - ); - try { + try ( + SearchPhaseResults results = controller.newSearchPhaseResults( + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + mockSearchPhaseContext.getRequest(), + 2, + exc -> {} + ) + ) { int resultSetSize = 1; boolean profiled = randomBoolean(); @@ -689,8 +690,8 @@ public void sendExecuteFetch( try { if (request.contextId().getId() == 321) { fetchResult.setSearchShardTarget(shard1Target); - SearchHits hits = new SearchHits( - new SearchHit[] { new SearchHit(84) }, + SearchHits hits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(84) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F ); @@ -740,7 +741,6 @@ public void run() { if (resp != null) { resp.decRef(); } - results.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index 1a510058e3bbd..ed807091ae9a2 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -98,6 +98,8 @@ public void sendSearchResponse(SearchResponseSections internalSearchResponse, At searchContextId ) ); + Releasables.close(releasables); + releasables.clear(); if (existing != null) { existing.decRef(); } @@ -147,12 +149,7 @@ public void addReleasable(Releasable releasable) { @Override public void execute(Runnable command) { - try { - command.run(); - } finally { - Releasables.close(releasables); - releasables.clear(); - } + command.run(); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java 
b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java index 7e1e7de03e288..91bf1059225d8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/OpenPointInTimeRequestTests.java @@ -96,7 +96,7 @@ protected OpenPointInTimeRequest mutateInstance(OpenPointInTimeRequest in) throw } public void testUseDefaultConcurrentForOldVersion() throws Exception { - TransportVersion previousVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_500_020); + TransportVersion previousVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.V_8_9_X); try (BytesStreamOutput output = new BytesStreamOutput()) { TransportVersion version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_0_0, previousVersion); output.setTransportVersion(version); diff --git a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java index 6035950ca4635..db32213ff97b7 100644 --- a/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/QueryPhaseResultConsumerTests.java @@ -103,20 +103,21 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { SearchRequest searchRequest = new SearchRequest("index"); searchRequest.setBatchedReduceSize(2); AtomicReference onPartialMergeFailure = new AtomicReference<>(); - QueryPhaseResultConsumer queryPhaseResultConsumer = new QueryPhaseResultConsumer( - searchRequest, - executor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - searchPhaseController, - () -> false, - searchProgressListener, - 10, - e -> onPartialMergeFailure.accumulateAndGet(e, (prev, curr) -> { - curr.addSuppressed(prev); - return curr; - }) - ); - try { + try ( + QueryPhaseResultConsumer 
queryPhaseResultConsumer = new QueryPhaseResultConsumer( + searchRequest, + executor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + searchPhaseController, + () -> false, + searchProgressListener, + 10, + e -> onPartialMergeFailure.accumulateAndGet(e, (prev, curr) -> { + curr.addSuppressed(prev); + return curr; + }) + ) + ) { CountDownLatch partialReduceLatch = new CountDownLatch(10); @@ -137,8 +138,6 @@ public void testProgressListenerExceptionsAreCaught() throws Exception { queryPhaseResultConsumer.reduce(); assertEquals(1, searchProgressListener.onFinalReduce.get()); - } finally { - queryPhaseResultConsumer.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 30e634314e0ba..cb41a03216dc5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -199,8 +199,7 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY); CountDownLatch awaitInitialRequests = new CountDownLatch(1); AtomicInteger numRequests = new AtomicInteger(0); - var results = new ArraySearchPhaseResults(shardsIter.size()); - try { + try (var results = new ArraySearchPhaseResults(shardsIter.size())) { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", logger, @@ -271,8 +270,6 @@ public void run() { latch.await(); assertTrue(searchPhaseDidRun.get()); assertEquals(numShards, numRequests.get()); - } finally { - results.decRef(); } } @@ -314,9 +311,8 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors())); final CountDownLatch latch = new CountDownLatch(1); final 
AtomicBoolean latchTriggered = new AtomicBoolean(); - var results = new ArraySearchPhaseResults(shardsIter.size()); final TestSearchResponse testResponse = new TestSearchResponse(); - try { + try (var results = new ArraySearchPhaseResults(shardsIter.size())) { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", logger, @@ -395,7 +391,6 @@ public void run() { assertThat(runnables, equalTo(Collections.emptyList())); } finally { testResponse.decRef(); - results.decRef(); } } @@ -550,8 +545,7 @@ public void testAllowPartialResults() throws InterruptedException { Map aliasFilters = Collections.singletonMap("_na_", AliasFilter.EMPTY); AtomicInteger numRequests = new AtomicInteger(0); AtomicInteger numFailReplicas = new AtomicInteger(0); - var results = new ArraySearchPhaseResults(shardsIter.size()); - try { + try (var results = new ArraySearchPhaseResults(shardsIter.size())) { AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<>( "test", logger, @@ -620,8 +614,6 @@ public void run() { assertTrue(searchPhaseDidRun.get()); assertEquals(numShards, numRequests.get()); assertThat(numFailReplicas.get(), greaterThanOrEqualTo(1)); - } finally { - results.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index ac88f999adef6..1f81ad2a02e8c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -558,7 +558,7 @@ private static AtomicArray generateFetchResults( List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { if (scoreDoc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(scoreDoc.doc, "")); + searchHits.add(SearchHit.unpooled(scoreDoc.doc, "")); if (scoreDoc.score > maxScore) { maxScore = scoreDoc.score; } @@ 
-570,7 +570,7 @@ private static AtomicArray generateFetchResults( for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) { ScoreDoc doc = option.getDoc(); if (doc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(doc.doc, "")); + searchHits.add(SearchHit.unpooled(doc.doc, "")); if (doc.score > maxScore) { maxScore = doc.score; } @@ -583,7 +583,10 @@ private static AtomicArray generateFetchResults( ProfileResult profileResult = profile && searchHits.size() > 0 ? new ProfileResult("fetch", "fetch", Map.of(), Map.of(), randomNonNegativeLong(), List.of()) : null; - fetchSearchResult.shardResult(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), profileResult); + fetchSearchResult.shardResult( + SearchHits.unpooled(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore), + profileResult + ); fetchResults.set(shardIndex, fetchSearchResult); } return fetchResults; @@ -610,16 +613,17 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - 3 + numEmptyResponses, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + 3 + numEmptyResponses, + exc -> {} + ) + ) { if (numEmptyResponses == 0) { assertEquals(0, reductions.size()); } @@ -723,8 +727,6 @@ private void consumerTestCase(int numEmptyResponses) throws Exception { assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); 
assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -735,16 +737,17 @@ public void testConsumerConcurrently() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test"))); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> {} + ) + ) { AtomicInteger max = new AtomicInteger(); Thread[] threads = new Thread[expectedNumResults]; CountDownLatch latch = new CountDownLatch(expectedNumResults); @@ -797,8 +800,6 @@ public void testConsumerConcurrently() throws Exception { assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -808,16 +809,17 @@ public void testConsumerOnlyAggs() throws Exception { SearchRequest request = randomSearchRequest(); request.source(new SearchSourceBuilder().aggregation(new MaxAggregationBuilder("test")).size(0)); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + 
expectedNumResults, + exc -> {} + ) + ) { AtomicInteger max = new AtomicInteger(); CountDownLatch latch = new CountDownLatch(expectedNumResults); for (int i = 0; i < expectedNumResults; i++) { @@ -857,8 +859,6 @@ public void testConsumerOnlyAggs() throws Exception { assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -870,16 +870,18 @@ public void testConsumerOnlyHits() throws Exception { request.source(new SearchSourceBuilder().size(randomIntBetween(1, 10))); } request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> {} + ) + ) { AtomicInteger max = new AtomicInteger(); CountDownLatch latch = new CountDownLatch(expectedNumResults); for (int i = 0; i < expectedNumResults; i++) { @@ -916,8 +918,6 @@ public void testConsumerOnlyHits() throws Exception { assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -937,16 +937,17 @@ public void testReduceTopNWithFromOffset() throws Exception { SearchRequest request = new SearchRequest(); request.source(new SearchSourceBuilder().size(5).from(5)); request.setBatchedReduceSize(randomIntBetween(2, 4)); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - 
SearchProgressListener.NOOP, - request, - 4, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + 4, + exc -> {} + ) + ) { int score = 100; CountDownLatch latch = new CountDownLatch(4); for (int i = 0; i < 4; i++) { @@ -984,8 +985,6 @@ public void testReduceTopNWithFromOffset() throws Exception { assertEquals(93.0f, scoreDocs[2].score, 0.0f); assertEquals(92.0f, scoreDocs[3].score, 0.0f); assertEquals(91.0f, scoreDocs[4].score, 0.0f); - } finally { - consumer.decRef(); } } @@ -995,16 +994,17 @@ public void testConsumerSortByField() throws Exception { SearchRequest request = randomSearchRequest(); int size = randomIntBetween(1, 10); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> {} + ) + ) { AtomicInteger max = new AtomicInteger(); SortField[] sortFields = { new SortField("field", SortField.Type.INT, true) }; DocValueFormat[] docValueFormats = { DocValueFormat.RAW }; @@ -1040,8 +1040,6 @@ public void testConsumerSortByField() throws Exception { assertEquals(SortField.Type.INT, reduce.sortedTopDocs().sortFields()[0].getType()); assertNull(reduce.sortedTopDocs().collapseField()); assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -1051,16 +1049,17 @@ public void testConsumerFieldCollapsing() throws Exception { SearchRequest request = randomSearchRequest(); int size = 
randomIntBetween(5, 10); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> {} + ) + ) { SortField[] sortFields = { new SortField("field", SortField.Type.STRING) }; BytesRef a = new BytesRef("a"); BytesRef b = new BytesRef("b"); @@ -1100,8 +1099,6 @@ public void testConsumerFieldCollapsing() throws Exception { assertEquals(SortField.Type.STRING, reduce.sortedTopDocs().sortFields()[0].getType()); assertEquals("field", reduce.sortedTopDocs().collapseField()); assertArrayEquals(collapseValues, reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -1110,16 +1107,17 @@ public void testConsumerSuggestions() throws Exception { int bufferSize = randomIntBetween(2, 200); SearchRequest request = randomSearchRequest(); request.setBatchedReduceSize(bufferSize); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> {} + ) + ) { int maxScoreTerm = -1; int maxScorePhrase = -1; int maxScoreCompletion = -1; @@ -1216,8 +1214,6 @@ public void testConsumerSuggestions() throws Exception { assertNull(reduce.sortedTopDocs().sortFields()); 
assertNull(reduce.sortedTopDocs().collapseField()); assertNull(reduce.sortedTopDocs().collapseValues()); - } finally { - consumer.decRef(); } } @@ -1257,16 +1253,17 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(numReduceListener.incrementAndGet(), reducePhase); } }; - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - progressListener, - request, - expectedNumResults, - exc -> {} - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + progressListener, + request, + expectedNumResults, + exc -> {} + ) + ) { AtomicInteger max = new AtomicInteger(); Thread[] threads = new Thread[expectedNumResults]; CountDownLatch latch = new CountDownLatch(expectedNumResults); @@ -1324,8 +1321,6 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna assertEquals(expectedNumResults, numQueryResultListener.get()); assertEquals(0, numQueryFailureListener.get()); assertEquals(numReduceListener.get(), reduce.numReducePhases()); - } finally { - consumer.decRef(); } } } @@ -1348,16 +1343,17 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t if (shouldFailPartial) { circuitBreaker.shouldBreak.set(true); } - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - circuitBreaker, - () -> false, - SearchProgressListener.NOOP, - request, - numShards, - exc -> hasConsumedFailure.set(true) - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + circuitBreaker, + () -> false, + SearchProgressListener.NOOP, + request, + numShards, + exc -> hasConsumedFailure.set(true) + ) + ) { CountDownLatch latch = new CountDownLatch(numShards); Thread[] threads = new Thread[numShards]; for (int i = 0; 
i < numShards; i++) { @@ -1406,8 +1402,6 @@ private void testReduceCase(int numShards, int bufferSize, boolean shouldFail) t } else { consumer.reduce(); } - } finally { - consumer.decRef(); } assertThat(circuitBreaker.allocated, equalTo(0L)); } @@ -1420,16 +1414,17 @@ public void testFailConsumeAggs() throws Exception { request.source(new SearchSourceBuilder().aggregation(AggregationBuilders.avg("foo")).size(0)); request.setBatchedReduceSize(bufferSize); AtomicBoolean hasConsumedFailure = new AtomicBoolean(); - SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( - fixedExecutor, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - () -> false, - SearchProgressListener.NOOP, - request, - expectedNumResults, - exc -> hasConsumedFailure.set(true) - ); - try { + try ( + SearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults( + fixedExecutor, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + () -> false, + SearchProgressListener.NOOP, + request, + expectedNumResults, + exc -> hasConsumedFailure.set(true) + ) + ) { for (int i = 0; i < expectedNumResults; i++) { final int index = i; QuerySearchResult result = new QuerySearchResult( @@ -1454,8 +1449,6 @@ public void testFailConsumeAggs() throws Exception { } } assertNull(consumer.reduce().aggregations()); - } finally { - consumer.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 760070979077d..aef472928923b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -187,17 +187,18 @@ public void sendExecuteQuery( searchRequest.allowPartialSearchResults(false); SearchPhaseController controller = new SearchPhaseController((t, r) -> 
InternalAggregationTestCase.emptyReduceContextBuilder()); SearchTask task = new SearchTask(0, "n/a", "n/a", () -> "test", null, Collections.emptyMap()); - QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( - searchRequest, - EsExecutors.DIRECT_EXECUTOR_SERVICE, - new NoopCircuitBreaker(CircuitBreaker.REQUEST), - controller, - task::isCancelled, - task.getProgressListener(), - shardsIter.size(), - exc -> {} - ); - try { + try ( + QueryPhaseResultConsumer resultConsumer = new QueryPhaseResultConsumer( + searchRequest, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + new NoopCircuitBreaker(CircuitBreaker.REQUEST), + controller, + task::isCancelled, + task.getProgressListener(), + shardsIter.size(), + exc -> {} + ) + ) { SearchQueryThenFetchAsyncAction action = new SearchQueryThenFetchAsyncAction( logger, null, @@ -252,8 +253,6 @@ public void run() { assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields.length, equalTo(1)); assertThat(((FieldDoc) phase.sortedTopDocs().scoreDocs()[0]).fields[0], equalTo(0)); - } finally { - resultConsumer.decRef(); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java index 8c0ffeabf0ea6..6d66a1fcd3847 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java @@ -154,7 +154,7 @@ public void testRandomVersionSerialization() throws IOException { // Versions before 8.8 don't support rank searchRequest.source().rankBuilder(null); } - if (version.before(TransportVersions.V_8_500_020) && searchRequest.source() != null) { + if (version.before(TransportVersions.V_8_9_X) && searchRequest.source() != null) { // Versions before 8_500_999 don't support queries searchRequest.source().subSearches(new ArrayList<>()); } 
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 0d34634df5ec4..0070d61a2adcb 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -395,7 +395,7 @@ public void testMergeCompletionSuggestions() throws InterruptedException { i, Collections.emptyMap() ); - SearchHit hit = new SearchHit(docId); + SearchHit hit = SearchHit.unpooled(docId); ShardId shardId = new ShardId( randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), @@ -481,7 +481,7 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException 1F, Collections.emptyMap() ); - SearchHit searchHit = new SearchHit(docId); + SearchHit searchHit = SearchHit.unpooled(docId); searchHit.shard( new SearchShardTarget( "node", @@ -826,6 +826,7 @@ public void testMergeSearchHits() throws InterruptedException { try { addResponse(searchResponseMerger, searchResponse); } finally { + searchHits.decRef(); searchResponse.decRef(); } } @@ -972,6 +973,7 @@ public void testMergeEmptySearchHitsWithNonEmpty() { try { merger.add(searchResponse); } finally { + searchHits.decRef(); searchResponse.decRef(); } } @@ -1158,7 +1160,7 @@ public void testPartialAggsMixedWithFullResponses() { int successful = 2; int skipped = 1; Index[] indices = new Index[] { new Index("foo_idx", "1bba9f5b-c5a1-4664-be1b-26be590c1aff") }; - SearchResponse searchResponseRemote1 = new SearchResponse( + final SearchResponse searchResponseRemote1 = new SearchResponse( createSimpleDeterministicSearchHits(clusterAlias, indices), createDeterminsticAggregation(maxAggName, rangeAggName, value, count), null, @@ -1199,268 +1201,282 @@ public void testPartialAggsMixedWithFullResponses() { ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY ); - - 
SearchResponse.Clusters clusters = SearchResponseTests.createCCSClusterObject(3, 2, true, 2, 1, 0, 0, new ShardSearchFailure[0]); - - // merge partial aggs with remote1, check, then merge in remote2, check - try ( - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( + try { + SearchResponse.Clusters clusters = SearchResponseTests.createCCSClusterObject( + 3, + 2, + true, + 2, + 1, 0, - 10, - 10, - new SearchTimeProvider(0, 0, () -> 0), - emptyReduceContextBuilder( - new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) - .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) + 0, + new ShardSearchFailure[0] + ); + + // merge partial aggs with remote1, check, then merge in remote2, check + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( + 0, + 10, + 10, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder( + new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) + .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) + ) ) - ) - ) { - searchResponseMerger.add(searchResponsePartialAggs); - searchResponseMerger.add(searchResponseRemote1); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); - try { - SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 - SearchHit hit1 = hits.getHits()[0]; - String expectedHit1 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit1.toString(), expectedHit1); - - SearchHit hit2 = hits.getHits()[1]; - String expectedHit2 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit2.toString(), expectedHit2); - - double expectedMaxValue = 44.44; // value from remote1 - long expectedBucketsDocCount = 33 + 44; - Max max = mergedResponse.getAggregations().get(maxAggName); - 
assertEquals(expectedMaxValue, max.value(), 0d); - Range range = mergedResponse.getAggregations().get(rangeAggName); - assertEquals(1, range.getBuckets().size()); - Range.Bucket bucket = range.getBuckets().get(0); - assertEquals("0.0", bucket.getFromAsString()); - assertEquals("10000.0", bucket.getToAsString()); - assertEquals(expectedBucketsDocCount, bucket.getDocCount()); - } finally { - mergedResponse.decRef(); - } + ) { + searchResponseMerger.add(searchResponsePartialAggs); + searchResponseMerger.add(searchResponseRemote1); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + SearchHits hits = mergedResponse.getHits(); + assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + SearchHit hit1 = hits.getHits()[0]; + String expectedHit1 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit1.toString(), expectedHit1); + + SearchHit hit2 = hits.getHits()[1]; + String expectedHit2 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit2.toString(), expectedHit2); + + double expectedMaxValue = 44.44; // value from remote1 + long expectedBucketsDocCount = 33 + 44; + Max max = mergedResponse.getAggregations().get(maxAggName); + assertEquals(expectedMaxValue, max.value(), 0d); + Range range = mergedResponse.getAggregations().get(rangeAggName); + assertEquals(1, range.getBuckets().size()); + Range.Bucket bucket = range.getBuckets().get(0); + assertEquals("0.0", bucket.getFromAsString()); + assertEquals("10000.0", bucket.getToAsString()); + assertEquals(expectedBucketsDocCount, bucket.getDocCount()); + } finally { + mergedResponse.decRef(); + } - searchResponseMerger.add(searchResponseRemote2); - mergedResponse = searchResponseMerger.getMergedResponse(clusters); - try { - SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from 
remote1, 2 from remote2 - - SearchHit hit1 = hits.getHits()[0]; - String expectedHit1 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit1.toString(), expectedHit1); - - SearchHit hit2 = hits.getHits()[1]; - String expectedHit2 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit2.toString(), expectedHit2); - - SearchHit hit3 = hits.getHits()[2]; - String expectedHit3 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit3.toString(), expectedHit3); - - SearchHit hit4 = hits.getHits()[3]; - String expectedHit4 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit4.toString(), expectedHit4); - - double expectedMaxValue = 55.55; // value from remote2 - long expectedBucketsDocCount = 33 + 44 + 55; - Max max = mergedResponse.getAggregations().get(maxAggName); - assertEquals(expectedMaxValue, max.value(), 0d); - Range range = mergedResponse.getAggregations().get(rangeAggName); - assertEquals(1, range.getBuckets().size()); - Range.Bucket bucket = range.getBuckets().get(0); - assertEquals("0.0", bucket.getFromAsString()); - assertEquals("10000.0", bucket.getToAsString()); - assertEquals(expectedBucketsDocCount, bucket.getDocCount()); - } finally { - mergedResponse.decRef(); + searchResponseMerger.add(searchResponseRemote2); + mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + SearchHits hits = mergedResponse.getHits(); + assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + + SearchHit hit1 = hits.getHits()[0]; + String expectedHit1 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit1.toString(), expectedHit1); + + SearchHit hit2 = hits.getHits()[1]; + String expectedHit2 = """ + { + "_index" : "remote2:foo_idx", + 
"_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit2.toString(), expectedHit2); + + SearchHit hit3 = hits.getHits()[2]; + String expectedHit3 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit3.toString(), expectedHit3); + + SearchHit hit4 = hits.getHits()[3]; + String expectedHit4 = """ + { + "_index" : "remote2:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit4.toString(), expectedHit4); + + double expectedMaxValue = 55.55; // value from remote2 + long expectedBucketsDocCount = 33 + 44 + 55; + Max max = mergedResponse.getAggregations().get(maxAggName); + assertEquals(expectedMaxValue, max.value(), 0d); + Range range = mergedResponse.getAggregations().get(rangeAggName); + assertEquals(1, range.getBuckets().size()); + Range.Bucket bucket = range.getBuckets().get(0); + assertEquals("0.0", bucket.getFromAsString()); + assertEquals("10000.0", bucket.getToAsString()); + assertEquals(expectedBucketsDocCount, bucket.getDocCount()); + } finally { + mergedResponse.decRef(); + } } - } - // merge remote1 and remote2, no partial aggs, check, then merge in partial aggs from local, check - try ( - SearchResponseMerger searchResponseMerger = new SearchResponseMerger( - 0, - 10, - 10, - new SearchTimeProvider(0, 0, () -> 0), - emptyReduceContextBuilder( - new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) - .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) + // merge remote1 and remote2, no partial aggs, check, then merge in partial aggs from local, check + try ( + SearchResponseMerger searchResponseMerger = new SearchResponseMerger( + 0, + 10, + 10, + new SearchTimeProvider(0, 0, () -> 0), + emptyReduceContextBuilder( + new AggregatorFactories.Builder().addAggregator(new MaxAggregationBuilder(maxAggName)) + .addAggregator(new DateRangeAggregationBuilder(rangeAggName)) + ) ) - ) - ) { - 
searchResponseMerger.add(searchResponseRemote2); - searchResponseMerger.add(searchResponseRemote1); - SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); - try { - SearchHits hits = mergedResponse.getHits(); - SearchHit hit1 = hits.getHits()[0]; - String expectedHit1 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit1.toString(), expectedHit1); - - SearchHit hit2 = hits.getHits()[1]; - String expectedHit2 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit2.toString(), expectedHit2); - - SearchHit hit3 = hits.getHits()[2]; - String expectedHit3 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit3.toString(), expectedHit3); - - SearchHit hit4 = hits.getHits()[3]; - String expectedHit4 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit4.toString(), expectedHit4); - - double expectedMaxValue = 55.55; // value from remote2 - long expectedBucketsDocCount = 44 + 55; // missing 33 from local partial aggs - Max max = mergedResponse.getAggregations().get(maxAggName); - assertEquals(expectedMaxValue, max.value(), 0d); - Range range = mergedResponse.getAggregations().get(rangeAggName); - assertEquals(1, range.getBuckets().size()); - Range.Bucket bucket = range.getBuckets().get(0); - assertEquals("0.0", bucket.getFromAsString()); - assertEquals("10000.0", bucket.getToAsString()); - assertEquals(expectedBucketsDocCount, bucket.getDocCount()); - } finally { - mergedResponse.decRef(); - } + ) { + searchResponseMerger.add(searchResponseRemote2); + searchResponseMerger.add(searchResponseRemote1); + SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + SearchHits hits = mergedResponse.getHits(); + SearchHit hit1 = hits.getHits()[0]; + String expectedHit1 = """ + { + "_index" : 
"remote1:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit1.toString(), expectedHit1); + + SearchHit hit2 = hits.getHits()[1]; + String expectedHit2 = """ + { + "_index" : "remote2:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit2.toString(), expectedHit2); + + SearchHit hit3 = hits.getHits()[2]; + String expectedHit3 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit3.toString(), expectedHit3); + + SearchHit hit4 = hits.getHits()[3]; + String expectedHit4 = """ + { + "_index" : "remote2:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit4.toString(), expectedHit4); + + double expectedMaxValue = 55.55; // value from remote2 + long expectedBucketsDocCount = 44 + 55; // missing 33 from local partial aggs + Max max = mergedResponse.getAggregations().get(maxAggName); + assertEquals(expectedMaxValue, max.value(), 0d); + Range range = mergedResponse.getAggregations().get(rangeAggName); + assertEquals(1, range.getBuckets().size()); + Range.Bucket bucket = range.getBuckets().get(0); + assertEquals("0.0", bucket.getFromAsString()); + assertEquals("10000.0", bucket.getToAsString()); + assertEquals(expectedBucketsDocCount, bucket.getDocCount()); + } finally { + mergedResponse.decRef(); + } - searchResponseMerger.add(searchResponsePartialAggs); - mergedResponse = searchResponseMerger.getMergedResponse(clusters); - try { - SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 - - SearchHit hit1 = hits.getHits()[0]; - String expectedHit1 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - assertEquals(hit1.toString(), expectedHit1); - - SearchHit hit2 = hits.getHits()[1]; - String expectedHit2 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 2.0, - "sort" : [ - 2.0 - ] - }"""; - 
assertEquals(hit2.toString(), expectedHit2); - - SearchHit hit3 = hits.getHits()[2]; - String expectedHit3 = """ - { - "_index" : "remote1:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit3.toString(), expectedHit3); - - SearchHit hit4 = hits.getHits()[3]; - String expectedHit4 = """ - { - "_index" : "remote2:foo_idx", - "_score" : 1.0, - "sort" : [ - 1.0 - ] - }"""; - assertEquals(hit4.toString(), expectedHit4); - - double expectedMaxValue = 55.55; // value from remote2 - long expectedBucketsDocCount = 33 + 44 + 55; // contributions from all 3 search responses - Max max = mergedResponse.getAggregations().get(maxAggName); - assertEquals(expectedMaxValue, max.value(), 0d); - Range range = mergedResponse.getAggregations().get(rangeAggName); - assertEquals(1, range.getBuckets().size()); - Range.Bucket bucket = range.getBuckets().get(0); - assertEquals("0.0", bucket.getFromAsString()); - assertEquals("10000.0", bucket.getToAsString()); - assertEquals(expectedBucketsDocCount, bucket.getDocCount()); - } finally { - mergedResponse.decRef(); + searchResponseMerger.add(searchResponsePartialAggs); + mergedResponse = searchResponseMerger.getMergedResponse(clusters); + try { + SearchHits hits = mergedResponse.getHits(); + assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + + SearchHit hit1 = hits.getHits()[0]; + String expectedHit1 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit1.toString(), expectedHit1); + + SearchHit hit2 = hits.getHits()[1]; + String expectedHit2 = """ + { + "_index" : "remote2:foo_idx", + "_score" : 2.0, + "sort" : [ + 2.0 + ] + }"""; + assertEquals(hit2.toString(), expectedHit2); + + SearchHit hit3 = hits.getHits()[2]; + String expectedHit3 = """ + { + "_index" : "remote1:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit3.toString(), expectedHit3); + + SearchHit hit4 = hits.getHits()[3]; 
+ String expectedHit4 = """ + { + "_index" : "remote2:foo_idx", + "_score" : 1.0, + "sort" : [ + 1.0 + ] + }"""; + assertEquals(hit4.toString(), expectedHit4); + + double expectedMaxValue = 55.55; // value from remote2 + long expectedBucketsDocCount = 33 + 44 + 55; // contributions from all 3 search responses + Max max = mergedResponse.getAggregations().get(maxAggName); + assertEquals(expectedMaxValue, max.value(), 0d); + Range range = mergedResponse.getAggregations().get(rangeAggName); + assertEquals(1, range.getBuckets().size()); + Range.Bucket bucket = range.getBuckets().get(0); + assertEquals("0.0", bucket.getFromAsString()); + assertEquals("10000.0", bucket.getToAsString()); + assertEquals(expectedBucketsDocCount, bucket.getDocCount()); + } finally { + mergedResponse.decRef(); + } } + } finally { + searchResponseRemote1.decRef(); + searchResponseRemote2.decRef(); + searchResponsePartialAggs.decRef(); } } @@ -1475,15 +1491,7 @@ private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Inde PriorityQueue priorityQueue = new PriorityQueue<>(new SearchHitComparator(sortFields)); SearchHit[] hits = deterministicSearchHitArray(numDocs, clusterAlias, indices, maxScore, scoreFactor, sortFields, priorityQueue); - SearchHits searchHits = new SearchHits( - hits, - totalHits, - maxScore == Float.NEGATIVE_INFINITY ? Float.NaN : maxScore, - sortFields, - null, - null - ); - return searchHits; + return SearchHits.unpooled(hits, totalHits, maxScore == Float.NEGATIVE_INFINITY ? 
Float.NaN : maxScore, sortFields, null, null); } private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) { @@ -1523,7 +1531,7 @@ private static SearchHit[] deterministicSearchHitArray( for (int j = 0; j < numDocs; j++) { ShardId shardId = new ShardId(randomFrom(indices), j); SearchShardTarget shardTarget = new SearchShardTarget("abc123", shardId, clusterAlias); - SearchHit hit = new SearchHit(j); + SearchHit hit = SearchHit.unpooled(j); float score = Float.NaN; if (Float.isNaN(maxScore) == false) { diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index ef759279e095f..0d85d020c4180 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -115,25 +115,29 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... 
sha } if (minimal == false) { SearchHits hits = SearchHitsTests.createTestItem(true, true); - InternalAggregations aggregations = aggregationsTests.createTestInstance(); - Suggest suggest = SuggestTests.createTestItem(); - SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem(); - return new SearchResponse( - hits, - aggregations, - suggest, - timedOut, - terminatedEarly, - profileResults, - numReducePhases, - null, - totalShards, - successfulShards, - skippedShards, - tookInMillis, - shardSearchFailures, - clusters - ); + try { + InternalAggregations aggregations = aggregationsTests.createTestInstance(); + Suggest suggest = SuggestTests.createTestItem(); + SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem(); + return new SearchResponse( + hits, + aggregations, + suggest, + timedOut, + terminatedEarly, + profileResults, + numReducePhases, + null, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardSearchFailures, + clusters + ); + } finally { + hits.decRef(); + } } else { return SearchResponseUtils.emptyWithTotalHits( null, @@ -381,9 +385,10 @@ public void testToXContent() throws IOException { SearchHit hit = new SearchHit(1, "id1"); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; + var sHits = new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f); { SearchResponse response = new SearchResponse( - new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), + sHits, null, null, false, @@ -425,7 +430,7 @@ public void testToXContent() throws IOException { } { SearchResponse response = new SearchResponse( - new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), + sHits, null, null, false, @@ -475,7 +480,7 @@ public void testToXContent() throws IOException { } { SearchResponse response = new SearchResponse( - new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), + sHits, null, null, false, @@ -617,6 
+622,7 @@ public void testToXContent() throws IOException { response.decRef(); } } + sHits.decRef(); } public void testSerialization() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 8bda62b91bc7e..86749c26ba730 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; @@ -20,6 +19,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.ShardRoutingState; @@ -286,9 +286,9 @@ protected void shardExecute( } } - public FlushResponse assertImmediateResponse(String index, TransportFlushAction flushAction) { + public BroadcastResponse assertImmediateResponse(String index, TransportFlushAction flushAction) { Date beginDate = new Date(); - FlushResponse flushResponse = ActionTestUtils.executeBlocking(flushAction, new FlushRequest(index)); + BroadcastResponse flushResponse = 
ActionTestUtils.executeBlocking(flushAction, new FlushRequest(index)); Date endDate = new Date(); long maxTime = 500; assertThat( diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 735ae41558240..7ee4d2d6bba9b 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -55,6 +55,7 @@ import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; @@ -516,6 +517,27 @@ public void testToValidateUpsertRequestWithVersion() { assertThat(updateRequest.validate().validationErrors(), contains("can't provide version in upsert request")); } + public void testUpdatingRejectsLongIds() { + String id = randomAlphaOfLength(511); + UpdateRequest request = new UpdateRequest("index", id); + request.doc("{}", XContentType.JSON); + ActionRequestValidationException validate = request.validate(); + assertNull(validate); + + id = randomAlphaOfLength(512); + request = new UpdateRequest("index", id); + request.doc("{}", XContentType.JSON); + validate = request.validate(); + assertNull(validate); + + id = randomAlphaOfLength(513); + request = new UpdateRequest("index", id); + request.doc("{}", XContentType.JSON); + validate = request.validate(); + assertThat(validate, notNullValue()); + assertThat(validate.getMessage(), containsString("id [" + id + "] is too long, must be no longer than 512 bytes but was: 513")); + } + public void testValidate() { { UpdateRequest request = new UpdateRequest("index", "id"); diff --git 
a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java index 395dde29597d3..ae557b1b418da 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelperTests.java @@ -42,6 +42,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.BOOTSTRAP_PLACEHOLDER_PREFIX; import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; @@ -91,6 +92,7 @@ public void testScheduling() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -189,6 +191,7 @@ public void testDescriptionOnMasterIneligibleNodes() { clusterState, emptyList(), emptyList(), + emptySet(), 15L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -207,6 +210,7 @@ public void testDescriptionOnMasterIneligibleNodes() { clusterState, singletonList(otherAddress), emptyList(), + emptySet(), 16L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -226,6 +230,7 @@ public void testDescriptionOnMasterIneligibleNodes() { clusterState, emptyList(), singletonList(otherNode), + emptySet(), 17L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -253,6 +258,7 @@ public void testDescriptionOnUnhealthyNodes() { clusterState, emptyList(), emptyList(), + emptySet(), 15L, electionStrategy, new StatusInfo(UNHEALTHY, "unhealthy-info"), @@ -273,6 +279,7 @@ public void testDescriptionOnUnhealthyNodes() { clusterState, emptyList(), emptyList(), + emptySet(), 15L, electionStrategy, new 
StatusInfo(UNHEALTHY, "unhealthy-info"), @@ -296,6 +303,7 @@ public void testDescriptionBeforeBootstrapping() { clusterState, emptyList(), emptyList(), + emptySet(), 1L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -317,6 +325,7 @@ public void testDescriptionBeforeBootstrapping() { clusterState, singletonList(otherAddress), emptyList(), + emptySet(), 2L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -340,6 +349,7 @@ public void testDescriptionBeforeBootstrapping() { clusterState, emptyList(), singletonList(otherNode), + emptySet(), 3L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -362,6 +372,7 @@ public void testDescriptionBeforeBootstrapping() { clusterState, emptyList(), emptyList(), + emptySet(), 4L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -414,6 +425,7 @@ public void testDescriptionAfterDetachCluster() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -435,6 +447,7 @@ public void testDescriptionAfterDetachCluster() { clusterState, singletonList(otherAddress), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -458,6 +471,7 @@ public void testDescriptionAfterDetachCluster() { clusterState, emptyList(), singletonList(otherNode), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -481,6 +495,7 @@ public void testDescriptionAfterDetachCluster() { clusterState, emptyList(), singletonList(yetAnotherNode), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -510,6 +525,7 @@ public void testDescriptionAfterBootstrapping() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -531,6 +547,7 @@ public void testDescriptionAfterBootstrapping() { clusterState, singletonList(otherAddress), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, 
"healthy-info"), @@ -554,6 +571,7 @@ public void testDescriptionAfterBootstrapping() { clusterState, emptyList(), singletonList(otherNode), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -577,6 +595,7 @@ public void testDescriptionAfterBootstrapping() { clusterState, emptyList(), singletonList(yetAnotherNode), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -593,12 +612,39 @@ public void testDescriptionAfterBootstrapping() { ) ); + final DiscoveryNode recentMaster = makeDiscoveryNode("recentMaster"); + assertThat( + new ClusterFormationState( + Settings.EMPTY, + clusterState, + emptyList(), + singletonList(yetAnotherNode), + singleton(recentMaster), + 0L, + electionStrategy, + new StatusInfo(HEALTHY, "healthy-info"), + emptyList() + ).getDescription(), + is( + "master not discovered or elected yet, an election requires a node with id [otherNode], " + + "have only discovered non-quorum [" + + noAttr(yetAnotherNode) + + "] who claim current master to be [" + + noAttr(recentMaster) + + "]; " + + "discovery will continue using [] from hosts providers and [" + + noAttr(localNode) + + "] from last-known cluster state; node term 0, last-accepted version 0 in term 0" + ) + ); + assertThat( new ClusterFormationState( Settings.EMPTY, state(localNode, "n1", "n2"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -619,6 +665,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", "n3"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -639,6 +686,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", BOOTSTRAP_PLACEHOLDER_PREFIX + "n3"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -659,6 +707,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", "n3", 
"n4"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -679,6 +728,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", "n3", "n4", "n5"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -699,6 +749,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", "n3", "n4", BOOTSTRAP_PLACEHOLDER_PREFIX + "n5"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -719,6 +770,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, "n1", "n2", "n3", BOOTSTRAP_PLACEHOLDER_PREFIX + "n4", BOOTSTRAP_PLACEHOLDER_PREFIX + "n5"), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -739,6 +791,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, new String[] { "n1" }, new String[] { "n1" }), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -759,6 +812,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, new String[] { "n1" }, new String[] { "n2" }), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -779,6 +833,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, new String[] { "n1" }, new String[] { "n2", "n3" }), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -799,6 +854,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, new String[] { "n1" }, new String[] { "n2", "n3", "n4" }), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -841,6 +897,7 @@ public void testDescriptionAfterBootstrapping() { stateWithOtherNodes, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new 
StatusInfo(HEALTHY, "healthy-info"), @@ -875,6 +932,7 @@ public void testDescriptionAfterBootstrapping() { state(localNode, GatewayMetaState.STALE_STATE_CONFIG_NODE_ID), emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -910,6 +968,7 @@ public void testJoinStatusReporting() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -924,6 +983,7 @@ public void testJoinStatusReporting() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -970,6 +1030,7 @@ public void testJoinStatusReporting() { clusterState, emptyList(), emptyList(), + emptySet(), 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -987,6 +1048,10 @@ public void testClusterFormationStateSerialization() { DiscoveryNodeUtils.create(UUID.randomUUID().toString()), DiscoveryNodeUtils.create(UUID.randomUUID().toString()) ); + Set mastersOfPeers = Set.of( + DiscoveryNodeUtils.create(UUID.randomUUID().toString()), + DiscoveryNodeUtils.create(UUID.randomUUID().toString()) + ); List joinStatuses = List.of( new JoinStatus( DiscoveryNodeUtils.create(UUID.randomUUID().toString()), @@ -1001,6 +1066,7 @@ public void testClusterFormationStateSerialization() { state(localNode, new String[] { "n1" }, new String[] { "n2", "n3", "n4" }), resolvedAddresses, foundPeers, + mastersOfPeers, 0L, electionStrategy, new StatusInfo(HEALTHY, "healthy-info"), @@ -1035,6 +1101,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState final DiscoveryNode localNode = originalClusterFormationState.localNode(); List resolvedAddresses = originalClusterFormationState.resolvedAddresses(); List foundPeers = originalClusterFormationState.foundPeers(); + Set mastersOfPeers = originalClusterFormationState.mastersOfPeers(); long currentTerm = originalClusterFormationState.currentTerm(); StatusInfo statusInfo = 
originalClusterFormationState.statusInfo(); List joinStatuses = originalClusterFormationState.inFlightJoinStatuses(); @@ -1043,13 +1110,14 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState originalClusterFormationState.lastAcceptedConfiguration(), originalClusterFormationState.lastCommittedConfiguration() ); - switch (randomIntBetween(1, 5)) { + switch (randomIntBetween(1, 6)) { case 1 -> { return new ClusterFormationState( settings, clusterState, resolvedAddresses, foundPeers, + mastersOfPeers, currentTerm + 1, electionStrategy, statusInfo, @@ -1064,6 +1132,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState clusterState, resolvedAddresses, newFoundPeers, + mastersOfPeers, currentTerm, electionStrategy, statusInfo, @@ -1085,6 +1154,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState clusterState, resolvedAddresses, foundPeers, + mastersOfPeers, currentTerm, electionStrategy, statusInfo, @@ -1098,6 +1168,7 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState clusterState, resolvedAddresses, foundPeers, + mastersOfPeers, currentTerm, electionStrategy, newStatusInfo, @@ -1110,6 +1181,26 @@ private ClusterFormationState mutateClusterFormationState(ClusterFormationState clusterState, resolvedAddresses, foundPeers, + mastersOfPeers, + currentTerm, + electionStrategy, + statusInfo, + joinStatuses + ); + } + case 6 -> { + List newMastersOfPeers = new ArrayList<>(mastersOfPeers); + if (mastersOfPeers.isEmpty() || randomBoolean()) { + newMastersOfPeers.add(DiscoveryNodeUtils.create(UUID.randomUUID().toString())); + } else { + newMastersOfPeers.remove(0); + } + return new ClusterFormationState( + settings, + clusterState, + resolvedAddresses, + foundPeers, + Set.copyOf(newMastersOfPeers), currentTerm, electionStrategy, statusInfo, diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java 
b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java index 0d93dfb3d7f62..2ad0f18de277f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/CoordinationDiagnosticsServiceTests.java @@ -993,6 +993,7 @@ private ClusterFormationFailureHelper.ClusterFormationState getClusterFormationS hasDiscoveredAllNodes ? allMasterEligibleNodes : randomSubsetOf(randomInt(allMasterEligibleNodes.size() - 1), allMasterEligibleNodes), + Collections.emptySet(), randomLong(), hasDiscoveredQuorum, new StatusInfo(randomFrom(StatusInfo.Status.HEALTHY, StatusInfo.Status.UNHEALTHY), randomAlphaOfLength(20)), diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java index 18385b1d7ad44..77c59fe9e8209 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java @@ -43,9 +43,9 @@ import java.util.Map; import java.util.UUID; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -126,9 +126,14 @@ public void testGetHealthIndicatorResultNotGreenVerboseTrue() throws Exception { assertThat(nodeIdToClusterFormationMap.get(node2.getId()), equalTo(node2ClusterFormation)); assertThat(nodeIdToNodeNameMap.get(node1.getId()), equalTo(node1.getName())); assertThat(nodeIdToNodeNameMap.get(node2.getId()), equalTo(node2.getName())); - List 
diagnosis = result.diagnosisList(); - assertThat(diagnosis.size(), equalTo(1)); - assertThat(diagnosis.get(0), is(StableMasterHealthIndicatorService.CONTACT_SUPPORT)); + assertThat( + result.diagnosisList(), + containsInAnyOrder( + StableMasterHealthIndicatorService.CONTACT_SUPPORT, + StableMasterHealthIndicatorService.TROUBLESHOOT_DISCOVERY, + StableMasterHealthIndicatorService.TROUBLESHOOT_UNSTABLE_CLUSTER + ) + ); } public void testGetHealthIndicatorResultNotGreenVerboseFalse() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index fb9bde31e8fc4..cb1dddd7c51f3 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; import org.hamcrest.Matcher; import java.util.Locale; @@ -501,4 +502,41 @@ public void testNodeProcessorsFloatValidation() { } } + // This test must complete to ensure that our basic infrastructure is working as expected. + // Specifically that ExecutorScalingQueue, which subclasses LinkedTransferQueue, correctly + // tracks tasks submitted to the executor. 
+ public void testBasicTaskExecution() { + final var executorService = EsExecutors.newScaling( + "test", + 0, + between(1, 5), + 60, + TimeUnit.SECONDS, + randomBoolean(), + EsExecutors.daemonThreadFactory("test"), + new ThreadContext(Settings.EMPTY) + ); + try { + final var countDownLatch = new CountDownLatch(between(1, 10)); + class TestTask extends AbstractRunnable { + @Override + protected void doRun() { + countDownLatch.countDown(); + if (countDownLatch.getCount() > 0) { + executorService.execute(TestTask.this); + } + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + } + + executorService.execute(new TestTask()); + safeAwait(countDownLatch); + } finally { + ThreadPool.terminate(executorService, 10, TimeUnit.SECONDS); + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java new file mode 100644 index 0000000000000..b1e1b9d620d2a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ExecutorScalingQueueTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.util.concurrent; + +import org.elasticsearch.test.ESTestCase; + +import java.util.concurrent.TimeUnit; + +public class ExecutorScalingQueueTests extends ESTestCase { + + public void testPut() { + var queue = new EsExecutors.ExecutorScalingQueue<>(); + queue.put(new Object()); + assertEquals(queue.size(), 1); + } + + public void testAdd() { + var queue = new EsExecutors.ExecutorScalingQueue<>(); + assertTrue(queue.add(new Object())); + assertEquals(queue.size(), 1); + } + + public void testTimedOffer() { + var queue = new EsExecutors.ExecutorScalingQueue<>(); + assertTrue(queue.offer(new Object(), 60, TimeUnit.SECONDS)); + assertEquals(queue.size(), 1); + } + +} diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java index ce5841d066d88..209261e8dce70 100644 --- a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java @@ -883,6 +883,46 @@ public boolean innerMatch(LogEvent event) { } } + @TestLogging(reason = "testing logging at WARN level", value = "org.elasticsearch.discovery:WARN") + public void testEventuallyLogsIfReturnedMasterIsUnreachable() { + final DiscoveryNode otherNode = newDiscoveryNode("node-from-hosts-list"); + providedAddresses.add(otherNode.getAddress()); + transportAddressConnector.addReachableNode(otherNode); + + peerFinder.activate(lastAcceptedNodes); + final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + VERBOSITY_INCREASE_TIMEOUT_SETTING.get(Settings.EMPTY).millis() + + PeerFinder.DISCOVERY_FIND_PEERS_INTERVAL_SETTING.get(Settings.EMPTY).millis(); + + runAllRunnableTasks(); + + assertFoundPeers(otherNode); + final DiscoveryNode unreachableMaster = newDiscoveryNode("unreachable-master"); + transportAddressConnector.unreachableAddresses.add(unreachableMaster.getAddress()); + + 
MockLogAppender.assertThatLogger(() -> { + while (deterministicTaskQueue.getCurrentTimeMillis() <= endTime) { + deterministicTaskQueue.advanceTime(); + runAllRunnableTasks(); + respondToRequests(node -> { + assertThat(node, is(otherNode)); + return new PeersResponse(Optional.of(unreachableMaster), emptyList(), randomNonNegativeLong()); + }); + } + }, + PeerFinder.class, + new MockLogAppender.SeenEventExpectation( + "discovery result", + "org.elasticsearch.discovery.PeerFinder", + Level.WARN, + "address [" + unreachableMaster.getAddress() + "]* [current master according to *node-from-hosts-list*" + ) + ); + + assertFoundPeers(otherNode); + assertThat(peerFinder.discoveredMasterNode, nullValue()); + assertFalse(peerFinder.discoveredMasterTerm.isPresent()); + } + public void testReconnectsToDisconnectedNodes() { final DiscoveryNode otherNode = newDiscoveryNode("original-node"); providedAddresses.add(otherNode.getAddress()); diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 6a87c0f704600..6577148d78c7b 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -9,8 +9,8 @@ package org.elasticsearch.index.fieldstats; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.rest.RestStatus; @@ -88,7 +88,7 @@ private void assertRequestCacheStats(long expectedHits, long expectedMisses) { } private void refreshIndex() { - RefreshResponse refreshResponse = 
indicesAdmin().prepareRefresh("index").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh("index").get(); assertThat(refreshResponse.getSuccessfulShards(), equalTo(refreshResponse.getSuccessfulShards())); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java index e43fa379054bf..c3d2d6a3f194b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldTypeTests.java @@ -8,6 +8,11 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.queries.function.FunctionQuery; +import org.apache.lucene.queries.function.valuesource.ByteVectorSimilarityFunction; +import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; @@ -158,6 +163,64 @@ public void testCreateNestedKnnQuery() { } } + public void testExactKnnQuery() { + int dims = randomIntBetween(2, 2048); + { + DenseVectorFieldType field = new DenseVectorFieldType( + "f", + IndexVersion.current(), + DenseVectorFieldMapper.ElementType.FLOAT, + dims, + true, + VectorSimilarity.COSINE, + Collections.emptyMap() + ); + float[] queryVector = new float[dims]; + for (int i = 0; i < dims; i++) { + queryVector[i] = randomFloat(); + } + Query query = field.createExactKnnQuery(queryVector); + assertTrue(query instanceof BooleanQuery); + BooleanQuery booleanQuery = (BooleanQuery) query; + boolean foundFunction = false; + for (BooleanClause clause : booleanQuery) { + if (clause.getQuery() instanceof FunctionQuery functionQuery) { + foundFunction = true; + 
assertTrue(functionQuery.getValueSource() instanceof FloatVectorSimilarityFunction); + } + } + assertTrue("Unable to find FloatVectorSimilarityFunction in created BooleanQuery", foundFunction); + } + { + DenseVectorFieldType field = new DenseVectorFieldType( + "f", + IndexVersion.current(), + DenseVectorFieldMapper.ElementType.BYTE, + dims, + true, + VectorSimilarity.COSINE, + Collections.emptyMap() + ); + byte[] queryVector = new byte[dims]; + float[] floatQueryVector = new float[dims]; + for (int i = 0; i < dims; i++) { + queryVector[i] = randomByte(); + floatQueryVector[i] = queryVector[i]; + } + Query query = field.createExactKnnQuery(floatQueryVector); + assertTrue(query instanceof BooleanQuery); + BooleanQuery booleanQuery = (BooleanQuery) query; + boolean foundFunction = false; + for (BooleanClause clause : booleanQuery) { + if (clause.getQuery() instanceof FunctionQuery functionQuery) { + foundFunction = true; + assertTrue(functionQuery.getValueSource() instanceof ByteVectorSimilarityFunction); + } + } + assertTrue("Unable to find FloatVectorSimilarityFunction in created BooleanQuery", foundFunction); + } + } + public void testFloatCreateKnnQuery() { DenseVectorFieldType unindexedField = new DenseVectorFieldType( "f", diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 516f65111afca..137e0cb348a9c 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -28,8 +28,12 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.search.vectors.ExactKnnQueryBuilder; +import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.test.AbstractQueryTestCase; import 
org.elasticsearch.test.TransportVersionUtils; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.hamcrest.Matchers; import java.io.IOException; @@ -48,6 +52,9 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase { + private static final String VECTOR_FIELD = "vector"; + private static final int VECTOR_DIMENSION = 3; + @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { mapperService.merge( @@ -76,6 +83,27 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws ), MapperService.MergeReason.MAPPING_UPDATE ); + XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("nested1") + .field("type", "nested") + .startObject("properties") + .startObject(VECTOR_FIELD) + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .field("index", true) + .field("similarity", "cosine") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject(); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(builder)), + MapperService.MergeReason.MAPPING_UPDATE + ); } /** @@ -233,6 +261,27 @@ public void testMustRewrite() throws IOException { assertEquals("Rewrite first", e.getMessage()); } + public void testKnnRewriteForInnerHits() throws IOException { + SearchExecutionContext context = createSearchExecutionContext(); + context.setAllowUnmappedFields(true); + KnnVectorQueryBuilder innerQueryBuilder = new KnnVectorQueryBuilder( + "nested1." 
+ VECTOR_FIELD, + new float[] { 1.0f, 2.0f, 3.0f }, + 1, + null + ); + NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder( + "nested1", + innerQueryBuilder, + RandomPicks.randomFrom(random(), ScoreMode.values()) + ); + InnerHitsRewriteContext rewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), context::nowInMillis); + QueryBuilder queryBuilder = Rewriteable.rewrite(nestedQueryBuilder, rewriteContext, true); + assertTrue(queryBuilder instanceof NestedQueryBuilder); + NestedQueryBuilder rewritten = (NestedQueryBuilder) queryBuilder; + assertTrue(rewritten.query() instanceof ExactKnnQueryBuilder); + } + public void testIgnoreUnmapped() throws IOException { final NestedQueryBuilder queryBuilder = new NestedQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None); queryBuilder.ignoreUnmapped(true); diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index 7c85cba4c34eb..ee35491a74d00 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.persistent; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; @@ -253,12 +253,12 @@ public void testParamsStatusAndNodeTaskAreDelegated() throws Exception { public void testTaskCancellation() { AtomicLong capturedTaskId = new AtomicLong(); - AtomicReference> capturedListener = new AtomicReference<>(); + AtomicReference> capturedListener = new AtomicReference<>(); Client 
client = mock(Client.class); when(client.settings()).thenReturn(Settings.EMPTY); PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) { @Override - void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { capturedTaskId.set(taskId); capturedListener.set(listener); } @@ -327,8 +327,7 @@ public void sendCompletionRequest( // That should trigger cancellation request assertThat(capturedTaskId.get(), equalTo(localId)); // Notify successful cancellation - capturedListener.get() - .onResponse(new CancelTasksResponse(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); + capturedListener.get().onResponse(new ListTasksResponse(Collections.emptyList(), Collections.emptyList(), Collections.emptyList())); // finish or fail task if (randomBoolean()) { @@ -349,7 +348,7 @@ public void testTaskLocalAbort() { when(client.settings()).thenReturn(Settings.EMPTY); PersistentTasksService persistentTasksService = new PersistentTasksService(null, null, client) { @Override - void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { + void sendCancelRequest(final long taskId, final String reason, final ActionListener listener) { fail("Shouldn't be called during local abort"); } diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index e9bf6f83f5bbc..40bdc3da37242 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.Index; @@ -143,7 +144,7 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp @Override protected Writeable.Reader instanceReader() { - return SearchHit::readFrom; + return in -> SearchHit.readFrom(in, randomBoolean()); } @Override @@ -159,16 +160,20 @@ protected SearchHit mutateInstance(SearchHit instance) { public void testFromXContent() throws IOException { XContentType xContentType = randomFrom(XContentType.values()).canonical(); SearchHit searchHit = createTestItem(xContentType, true, false); - boolean humanReadable = randomBoolean(); - BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); - SearchHit parsed; - try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { - parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); - assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); - assertNull(parser.nextToken()); + try { + boolean humanReadable = randomBoolean(); + BytesReference originalBytes = toShuffledXContent(searchHit, xContentType, ToXContent.EMPTY_PARAMS, humanReadable); + SearchHit parsed; + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + parser.nextToken(); // jump to first START_OBJECT + parsed = SearchHit.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); + } finally { + searchHit.decRef(); } - assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, humanReadable), xContentType); } /** @@ -184,22 +189,26 @@ public void testFromXContent() throws IOException { public void testFromXContentLenientParsing() throws 
IOException { XContentType xContentType = randomFrom(XContentType.values()); SearchHit searchHit = createTestItem(xContentType, true, true); - BytesReference originalBytes = toXContent(searchHit, xContentType, true); - Predicate pathsToExclude = path -> path.endsWith("highlight") - || path.contains("fields") - || path.contains("_source") - || path.contains("inner_hits") - || path.isEmpty(); - BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); - - SearchHit parsed; - try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { - parser.nextToken(); // jump to first START_OBJECT - parsed = SearchHit.fromXContent(parser); - assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); - assertNull(parser.nextToken()); + try { + BytesReference originalBytes = toXContent(searchHit, xContentType, true); + Predicate pathsToExclude = path -> path.endsWith("highlight") + || path.contains("fields") + || path.contains("_source") + || path.contains("inner_hits") + || path.isEmpty(); + BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, pathsToExclude, random()); + + SearchHit parsed; + try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { + parser.nextToken(); // jump to first START_OBJECT + parsed = SearchHit.fromXContent(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType); + } finally { + searchHit.decRef(); } - assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType); } /** @@ -221,15 +230,19 @@ public void testFromXContentWithoutTypeAndId() throws IOException { public void testToXContent() throws IOException { SearchHit searchHit = new SearchHit(1, "id1"); - searchHit.score(1.5f); - XContentBuilder builder = 
JsonXContent.contentBuilder(); - searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals(""" - {"_id":"id1","_score":1.5}""", Strings.toString(builder)); + try { + searchHit.score(1.5f); + XContentBuilder builder = JsonXContent.contentBuilder(); + searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertEquals(""" + {"_id":"id1","_score":1.5}""", Strings.toString(builder)); + } finally { + searchHit.decRef(); + } } public void testRankToXContent() throws IOException { - SearchHit searchHit = new SearchHit(1, "id1"); + SearchHit searchHit = SearchHit.unpooled(1, "id1"); searchHit.setRank(1); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -264,30 +277,42 @@ public void testSerializeShardTarget() throws Exception { hit2.shard(target); SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); - - TransportVersion version = TransportVersionUtils.randomVersion(random()); - SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, version); - SearchShardTarget deserializedTarget = results.getAt(0).getShard(); - assertThat(deserializedTarget, equalTo(target)); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); - assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); - assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); - for (SearchHit hit : results) { - assertEquals(clusterAlias, hit.getClusterAlias()); - if (hit.getInnerHits() != null) { - for (SearchHits innerhits : hit.getInnerHits().values()) { - for (SearchHit innerHit : innerhits) { - assertEquals(clusterAlias, innerHit.getClusterAlias()); + try { + TransportVersion version = 
TransportVersionUtils.randomVersion(random()); + SearchHits results = copyWriteable( + hits, + getNamedWriteableRegistry(), + (StreamInput in) -> SearchHits.readFrom(in, randomBoolean()), + version + ); + try { + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); + for (SearchHit hit : results) { + assertEquals(clusterAlias, hit.getClusterAlias()); + if (hit.getInnerHits() != null) { + for (SearchHits innerhits : hit.getInnerHits().values()) { + for (SearchHit innerHit : innerhits) { + assertEquals(clusterAlias, innerHit.getClusterAlias()); + } + } } } + assertThat(results.getAt(1).getShard(), equalTo(target)); + } finally { + results.decRef(); } + } finally { + hits.decRef(); } - assertThat(results.getAt(1).getShard(), equalTo(target)); } public void testNullSource() { - SearchHit searchHit = new SearchHit(0, "_id"); + SearchHit searchHit = SearchHit.unpooled(0, "_id"); assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); @@ -299,7 +324,7 @@ public void testNullSource() { } public void testHasSource() { - SearchHit searchHit = new SearchHit(randomInt()); + SearchHit searchHit = SearchHit.unpooled(randomInt()); assertFalse(searchHit.hasSource()); searchHit.sourceRef(new BytesArray("{}")); assertTrue(searchHit.hasSource()); @@ -376,7 +401,7 @@ public void testToXContentEmptyFields() throws IOException { Map fields = new HashMap<>(); fields.put("foo", new DocumentField("foo", Collections.emptyList())); fields.put("bar", new DocumentField("bar", 
Collections.emptyList())); - SearchHit hit = new SearchHit(0, "_id"); + SearchHit hit = SearchHit.unpooled(0, "_id"); hit.addDocumentFields(fields, Map.of()); { BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean()); @@ -389,13 +414,17 @@ public void testToXContentEmptyFields() throws IOException { assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertNull(parser.nextToken()); } - assertThat(parsed.getFields().size(), equalTo(0)); + try { + assertThat(parsed.getFields().size(), equalTo(0)); + } finally { + parsed.decRef(); + } } fields = new HashMap<>(); fields.put("foo", new DocumentField("foo", Collections.emptyList())); fields.put("bar", new DocumentField("bar", Collections.singletonList("value"))); - hit = new SearchHit(0, "_id"); + hit = SearchHit.unpooled(0, "_id"); hit.addDocumentFields(fields, Collections.emptyMap()); { BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean()); @@ -412,7 +441,7 @@ public void testToXContentEmptyFields() throws IOException { Map metadata = new HashMap<>(); metadata.put("_routing", new DocumentField("_routing", Collections.emptyList())); - hit = new SearchHit(0, "_id"); + hit = SearchHit.unpooled(0, "_id"); hit.addDocumentFields(fields, Collections.emptyMap()); { BytesReference originalBytes = toShuffledXContent(hit, XContentType.JSON, ToXContent.EMPTY_PARAMS, randomBoolean()); @@ -427,7 +456,13 @@ public void testToXContentEmptyFields() throws IOException { assertThat(parsed.getFields().get("bar").getValues(), equalTo(Collections.singletonList("value"))); assertNull(parsed.getFields().get("_routing")); } + } + @Override + protected void dispose(SearchHit searchHit) { + if (searchHit != null) { + searchHit.decRef(); + } } static Explanation createExplanation(int depth) { diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java 
b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 1e720064dab56..4ca3c5b8dd46e 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -118,7 +118,7 @@ protected SearchHits mutateInstance(SearchHits instance) { } else { totalHits = null; } - return new SearchHits(instance.getHits(), totalHits, instance.getMaxScore()); + return new SearchHits(instance.asUnpooled().getHits(), totalHits, instance.getMaxScore()); case 2: final float maxScore; if (Float.isNaN(instance.getMaxScore())) { @@ -126,7 +126,7 @@ protected SearchHits mutateInstance(SearchHits instance) { } else { maxScore = Float.NaN; } - return new SearchHits(instance.getHits(), instance.getTotalHits(), maxScore); + return new SearchHits(instance.asUnpooled().getHits(), instance.getTotalHits(), maxScore); case 3: SortField[] sortFields; if (instance.getSortFields() == null) { @@ -135,7 +135,7 @@ protected SearchHits mutateInstance(SearchHits instance) { sortFields = randomBoolean() ? createSortFields(instance.getSortFields().length + 1) : null; } return new SearchHits( - instance.getHits(), + instance.asUnpooled().getHits(), instance.getTotalHits(), instance.getMaxScore(), sortFields, @@ -150,7 +150,7 @@ protected SearchHits mutateInstance(SearchHits instance) { collapseField = randomBoolean() ? instance.getCollapseField() + randomAlphaOfLengthBetween(2, 5) : null; } return new SearchHits( - instance.getHits(), + instance.asUnpooled().getHits(), instance.getTotalHits(), instance.getMaxScore(), instance.getSortFields(), @@ -165,7 +165,7 @@ protected SearchHits mutateInstance(SearchHits instance) { collapseValues = randomBoolean() ? 
createCollapseValues(instance.getCollapseValues().length + 1) : null; } return new SearchHits( - instance.getHits(), + instance.asUnpooled().getHits(), instance.getTotalHits(), instance.getMaxScore(), instance.getSortFields(), @@ -177,6 +177,11 @@ protected SearchHits mutateInstance(SearchHits instance) { } } + @Override + protected void dispose(SearchHits searchHits) { + searchHits.decRef(); + } + @Override protected Predicate getRandomFieldsExcludeFilter() { return path -> (path.isEmpty() @@ -193,7 +198,7 @@ protected String[] getShuffleFieldsExceptions() { @Override protected Writeable.Reader instanceReader() { - return SearchHits::new; + return in -> SearchHits.readFrom(in, randomBoolean()); } @Override @@ -223,15 +228,19 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException { SearchHits searchHits = SearchHits.fromXContent(parser); assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - return searchHits; + try { + return searchHits.asUnpooled(); + } finally { + searchHits.decRef(); + } } public void testToXContent() throws IOException { - SearchHit[] hits = new SearchHit[] { new SearchHit(1, "id1"), new SearchHit(2, "id2") }; + SearchHit[] hits = new SearchHit[] { SearchHit.unpooled(1, "id1"), SearchHit.unpooled(2, "id2") }; long totalHits = 1000; float maxScore = 1.5f; - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); ChunkedToXContent.wrapAsToXContent(searchHits).toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -251,7 +260,10 @@ public void testToXContent() throws IOException { public void testFromXContentWithShards() throws IOException { for (boolean withExplanation : new boolean[] { true, 
false }) { - final SearchHit[] hits = new SearchHit[] { new SearchHit(1, "id1"), new SearchHit(2, "id2"), new SearchHit(10, "id10") }; + final SearchHit[] hits = new SearchHit[] { + SearchHit.unpooled(1, "id1"), + SearchHit.unpooled(2, "id2"), + SearchHit.unpooled(10, "id10") }; for (SearchHit hit : hits) { String index = randomAlphaOfLengthBetween(5, 10); @@ -269,7 +281,7 @@ public void testFromXContentWithShards() throws IOException { long totalHits = 1000; float maxScore = 1.5f; - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); XContentType xContentType = randomFrom(XContentType.values()).canonical(); BytesReference bytes = toShuffledXContent( ChunkedToXContent.wrapAsToXContent(searchHits), @@ -304,7 +316,6 @@ public void testFromXContentWithShards() throws IOException { } } } - } } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 57974cff0d03c..6a8ac3d1aa876 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -425,6 +425,7 @@ public CheckedBiConsumer getReque "combined_fields", "dis_max", "exists", + "exact_knn", "function_score", "fuzzy", "geo_bounding_box", diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index bbeeb855f8d18..99be8590e06f2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -11,10 +11,14 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; @@ -603,7 +607,10 @@ private void testSearchCase( final Consumer verify ) throws IOException { try (Directory directory = newDirectory()) { - try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + IndexWriterConfig config = LuceneTestCase.newIndexWriterConfig(random(), new MockAnalyzer(random())); + // Use LogDocMergePolicy to avoid randomization issues with the doc retrieval order. 
+ config.setMergePolicy(new LogDocMergePolicy()); + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, config)) { indexSampleData(dataset, indexWriter, multipleSegments); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 7d3799b2db35d..1052987aabbdd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -159,13 +159,13 @@ private InternalTopHits createTestInstance( Map searchHitFields = new HashMap<>(); scoreDocs[i] = docBuilder.apply(docId, score); - hits[i] = new SearchHit(docId, Integer.toString(i)); + hits[i] = SearchHit.unpooled(docId, Integer.toString(i)); hits[i].addDocumentFields(searchHitFields, Collections.emptyMap()); hits[i].score(score); } int totalHits = between(actualSize, 500000); sort(hits, scoreDocs, comparator); - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), maxScore); TopDocs topDocs = topDocsBuilder.apply(new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), scoreDocs); // Lucene's TopDocs initializes the maxScore to Float.NaN, if there is no maxScore @@ -276,16 +276,20 @@ protected void assertReduced(InternalTopHits reduced, List inpu new TotalHits(totalHits, relation), maxScore == Float.NEGATIVE_INFINITY ? 
Float.NaN : maxScore ); - assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); + try { + assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); + } finally { + expectedHits.decRef(); + } } public void testGetProperty() { // Create a SearchHit containing: { "foo": 1000.0 } and use it to initialize an InternalTopHits instance. - SearchHit hit = new SearchHit(0); + SearchHit hit = SearchHit.unpooled(0); hit = hit.sourceRef(Source.fromMap(Map.of("foo", 1000.0), XContentType.YAML).internalSourceRef()); hit.sortValues(new Object[] { 10.0 }, new DocValueFormat[] { DocValueFormat.RAW }); hit.score(1.0f); - SearchHits hits = new SearchHits(new SearchHit[] { hit }, null, 0); + SearchHits hits = SearchHits.unpooled(new SearchHit[] { hit }, null, 0); InternalTopHits internalTopHits = new InternalTopHits("test", 0, 0, null, hits, null); assertEquals(internalTopHits, internalTopHits.getProperty(Collections.emptyList())); @@ -301,7 +305,7 @@ public void testGetProperty() { expectThrows(IllegalArgumentException.class, () -> internalTopHits.getProperty(List.of("_sort"))); // Two SearchHit instances are not allowed, only the first will be used without assertion. 
- hits = new SearchHits(new SearchHit[] { hit, hit }, null, 0); + hits = SearchHits.unpooled(new SearchHit[] { hit, hit }, null, 0); InternalTopHits internalTopHits3 = new InternalTopHits("test", 0, 0, null, hits, null); expectThrows(IllegalArgumentException.class, () -> internalTopHits3.getProperty(List.of("foo"))); } @@ -397,7 +401,7 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { int from = instance.getFrom(); int size = instance.getSize(); TopDocsAndMaxScore topDocs = instance.getTopDocs(); - SearchHits searchHits = instance.getHits(); + SearchHits searchHits = instance.getHits().asUnpooled(); Map metadata = instance.getMetadata(); switch (between(0, 5)) { case 0 -> name += randomAlphaOfLength(5); @@ -415,7 +419,7 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { searchHits.getTotalHits().value + between(1, 100), randomFrom(TotalHits.Relation.values()) ); - searchHits = new SearchHits(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); + searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); } case 5 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java index f242e19012a35..0fe660e56822c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestStateTests.java @@ -194,7 +194,7 @@ public void testSerialization() throws IOException { backwardsCompatible.add(i); } - TDigestState serialized = writeToAndReadFrom(state, TransportVersions.V_8_500_020); + TDigestState serialized = writeToAndReadFrom(state, TransportVersions.V_8_9_X); assertEquals(serialized, state); TDigestState serializedBackwardsCompatible = writeToAndReadFrom(state, 
TransportVersions.V_8_8_1); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java index 4c8484be200e5..f8af8a2e3109b 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchPhaseDocsIteratorTests.java @@ -81,6 +81,7 @@ protected SearchHit nextDoc(int doc) { assertThat(hits.length, equalTo(docs.length)); for (int i = 0; i < hits.length; i++) { assertThat(hits[i].docId(), equalTo(docs[i])); + hits[i].decRef(); } reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java index e0a26fbc67ffd..a5371e7b0b00a 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchFieldsPhaseTests.java @@ -82,7 +82,7 @@ public void testDocValueFetcher() throws IOException { for (LeafReaderContext context : reader.leaves()) { processor.setNextReader(context); for (int doc = 0; doc < context.reader().maxDoc(); doc++) { - SearchHit searchHit = new SearchHit(doc + context.docBase); + SearchHit searchHit = SearchHit.unpooled(doc + context.docBase); processor.process(new FetchSubPhase.HitContext(searchHit, context, doc, Map.of(), Source.empty(null))); assertNotNull(searchHit.getFields().get("field")); } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 620706a01c88f..3a4d67ae281f2 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -170,7 +170,7 @@ private HitContext hitExecuteMultiple( when(sec.isSourceEnabled()).thenReturn(sourceBuilder != null); when(fetchContext.getSearchExecutionContext()).thenReturn(sec); - final SearchHit searchHit = new SearchHit(1, null, nestedIdentity); + final SearchHit searchHit = SearchHit.unpooled(1, null, nestedIdentity); // We don't need a real index, just a LeafReaderContext which cannot be mocked. MemoryIndex index = new MemoryIndex(); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java index 7a1751dbd41fc..be36d72304bd0 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FieldFetcherTests.java @@ -1169,7 +1169,7 @@ public void testNestedGrouping() throws IOException { """; var results = fetchFields(mapperService, source, fieldAndFormatList("*", null, false)); - SearchHit searchHit = new SearchHit(0); + SearchHit searchHit = SearchHit.unpooled(0); searchHit.addDocumentFields(results, Map.of()); assertThat(Strings.toString(searchHit), containsString("\"ml.top_classes\":")); } diff --git a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java index ed92bdb1e5919..b16e8f68c7e32 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ShardSearchRequestTests.java @@ -237,7 +237,7 @@ public void testChannelVersion() throws Exception { version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_8_0, TransportVersion.current()); } if (request.source() != null && 
request.source().subSearches().size() >= 2) { - version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_500_020, TransportVersion.current()); + version = TransportVersionUtils.randomVersionBetween(random(), TransportVersions.V_8_9_X, TransportVersion.current()); } request = copyWriteable(request, namedWriteableRegistry, ShardSearchRequest::new, version); channelVersion = TransportVersion.min(channelVersion, version); diff --git a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java index f42ca49dc14b9..7aece1476a99d 100644 --- a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java +++ b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java @@ -67,7 +67,7 @@ public ThrowingQueryBuilder(StreamInput in) throws IOException { this.randomUID = in.readLong(); this.failure = in.readException(); this.shardId = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.index = in.readOptionalString(); } else { this.index = null; @@ -79,7 +79,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeLong(randomUID); out.writeException(failure); out.writeVInt(shardId); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalString(index); } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java index 8a82ae8ce7268..42fe65c8d14ef 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestionOptionTests.java @@ -54,6 +54,9 @@ public static Option createTestItem() { } Option option = new CompletionSuggestion.Entry.Option(docId, text, score, contexts); option.setHit(hit); + if (hit != null) { + hit.decRef(); + } return option; } diff --git a/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java new file mode 100644 index 0000000000000..02093d9fa0e44 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilderTests.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.vectors; + +import org.apache.lucene.queries.function.FunctionQuery; +import org.apache.lucene.queries.function.valuesource.FloatVectorSimilarityFunction; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.test.TestGeoShapeFieldMapperPlugin; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Locale; + +public class ExactKnnQueryBuilderTests extends AbstractQueryTestCase { + + private static final String VECTOR_FIELD = "vector"; + private static final int VECTOR_DIMENSION = 3; + + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject(VECTOR_FIELD) + .field("type", "dense_vector") + .field("dims", VECTOR_DIMENSION) + .field("index", true) + .field("similarity", "cosine") + .endObject() + .endObject() + .endObject(); + mapperService.merge( + MapperService.SINGLE_MAPPING_NAME, + new CompressedXContent(Strings.toString(builder)), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + + @Override + protected Collection> getPlugins() { + return List.of(TestGeoShapeFieldMapperPlugin.class); + } + + @Override + protected ExactKnnQueryBuilder doCreateTestQueryBuilder() { + float[] query = new float[VECTOR_DIMENSION]; + for (int i = 0; i < VECTOR_DIMENSION; i++) { 
+ query[i] = randomFloat(); + } + return new ExactKnnQueryBuilder(query, VECTOR_FIELD); + } + + @Override + public void testValidOutput() { + ExactKnnQueryBuilder query = new ExactKnnQueryBuilder(new float[] { 1.0f, 2.0f, 3.0f }, "field"); + String expected = """ + { + "exact_knn" : { + "query" : [ + 1.0, + 2.0, + 3.0 + ], + "field" : "field" + } + }"""; + assertEquals(expected, query.toString()); + } + + @Override + protected void doAssertLuceneQuery(ExactKnnQueryBuilder queryBuilder, Query query, SearchExecutionContext context) throws IOException { + assertTrue(query instanceof BooleanQuery); + BooleanQuery booleanQuery = (BooleanQuery) query; + boolean foundFunction = false; + for (BooleanClause clause : booleanQuery) { + if (clause.getQuery() instanceof FunctionQuery functionQuery) { + foundFunction = true; + assertTrue(functionQuery.getValueSource() instanceof FloatVectorSimilarityFunction); + String description = functionQuery.getValueSource().description().toLowerCase(Locale.ROOT); + if (context.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.NORMALIZED_VECTOR_COSINE)) { + assertTrue(description, description.contains("dot_product")); + } else { + assertTrue(description, description.contains("cosine")); + } + } + } + assertTrue("Unable to find FloatVectorSimilarityFunction in created BooleanQuery", foundFunction); + } + + @Override + public void testUnknownObjectException() { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testFromXContent() throws IOException { + // Test isn't relevant, since query is never parsed from xContent + } + + @Override + public void testUnknownField() { + // Test isn't relevant, since query is never parsed from xContent + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index eceafe6d12ac9..67bc6bde9c1af 100644 --- 
a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -23,8 +23,10 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.index.query.InnerHitsRewriteContext; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.AbstractQueryTestCase; @@ -38,6 +40,7 @@ import java.util.List; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; +import static org.elasticsearch.search.vectors.KnnSearchBuilderTests.randomVector; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -58,12 +61,20 @@ protected KnnScoreDocQueryBuilder doCreateTestQueryBuilder() { for (int doc = 0; doc < numDocs; doc++) { scoreDocs.add(new ScoreDoc(doc, randomFloat())); } - return new KnnScoreDocQueryBuilder(scoreDocs.toArray(new ScoreDoc[0])); + return new KnnScoreDocQueryBuilder( + scoreDocs.toArray(new ScoreDoc[0]), + randomBoolean() ? "field" : null, + randomBoolean() ? 
randomVector(10) : null + ); } @Override public void testValidOutput() { - KnnScoreDocQueryBuilder query = new KnnScoreDocQueryBuilder(new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }); + KnnScoreDocQueryBuilder query = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, + "field", + new float[] { 1.0f, 2.0f } + ); String expected = """ { "knn_score_doc" : { @@ -76,6 +87,11 @@ public void testValidOutput() { "doc" : 5, "score" : 1.6 } + ], + "field" : "field", + "query" : [ + 1.0, + 2.0 ] } }"""; @@ -144,11 +160,36 @@ public void testMustRewrite() throws IOException { } public void testRewriteToMatchNone() throws IOException { - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(new ScoreDoc[0]); - SearchExecutionContext context = createSearchExecutionContext(); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( + new ScoreDoc[0], + randomBoolean() ? "field" : null, + randomBoolean() ? randomVector(10) : null + ); + QueryRewriteContext context = randomBoolean() + ? 
new InnerHitsRewriteContext(createSearchExecutionContext().getParserConfig(), System::currentTimeMillis) + : createSearchExecutionContext(); assertEquals(new MatchNoneQueryBuilder(), queryBuilder.rewrite(context)); } + public void testRewriteForInnerHits() throws IOException { + SearchExecutionContext context = createSearchExecutionContext(); + InnerHitsRewriteContext innerHitsRewriteContext = new InnerHitsRewriteContext(context.getParserConfig(), System::currentTimeMillis); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder( + new ScoreDoc[] { new ScoreDoc(0, 4.25f), new ScoreDoc(5, 1.6f) }, + randomAlphaOfLength(10), + randomVector(10) + ); + queryBuilder.boost(randomFloat()); + queryBuilder.queryName(randomAlphaOfLength(10)); + QueryBuilder rewritten = queryBuilder.rewrite(innerHitsRewriteContext); + assertTrue(rewritten instanceof ExactKnnQueryBuilder); + ExactKnnQueryBuilder exactKnnQueryBuilder = (ExactKnnQueryBuilder) rewritten; + assertEquals(queryBuilder.queryVector(), exactKnnQueryBuilder.getQuery()); + assertEquals(queryBuilder.fieldName(), exactKnnQueryBuilder.getField()); + assertEquals(queryBuilder.boost(), exactKnnQueryBuilder.boost(), 0.0001f); + assertEquals(queryBuilder.queryName(), exactKnnQueryBuilder.queryName()); + } + @Override public void testUnknownObjectException() { // Test isn't relevant, since query is never parsed from xContent @@ -185,7 +226,7 @@ public void testScoreDocQueryWeightCount() throws IOException { } ScoreDoc[] scoreDocs = scoreDocsList.toArray(new ScoreDoc[0]); - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs, "field", randomVector(10)); Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { @@ -228,7 +269,7 @@ public void testScoreDocQuery() throws 
IOException { } ScoreDoc[] scoreDocs = scoreDocsList.toArray(new ScoreDoc[0]); - KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs); + KnnScoreDocQueryBuilder queryBuilder = new KnnScoreDocQueryBuilder(scoreDocs, "field", randomVector(10)); final Query query = queryBuilder.doToQuery(context); final Weight w = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0f); diff --git a/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java b/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java deleted file mode 100644 index 793c1f60c38e6..0000000000000 --- a/server/src/test/java/org/elasticsearch/tasks/CancelTasksResponseTests.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.tasks; - -import org.elasticsearch.action.FailedNodeException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.ChunkedToXContent; -import org.elasticsearch.test.AbstractXContentTestCase; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.net.ConnectException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.function.Predicate; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; - -public class CancelTasksResponseTests extends AbstractXContentTestCase { - - // CancelTasksResponse doesn't directly implement ToXContent because it has multiple XContent representations, so we must wrap here - public record CancelTasksResponseWrapper(CancelTasksResponse in) implements ToXContentObject { - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return ChunkedToXContent.wrapAsToXContent(in.groupedByNone()).toXContent(builder, params); - } - } - - @Override - protected CancelTasksResponseWrapper createTestInstance() { - List randomTasks = randomTasks(); - return new CancelTasksResponseWrapper(new CancelTasksResponse(randomTasks, Collections.emptyList(), Collections.emptyList())); - } - - private static List randomTasks() { - List randomTasks = new ArrayList<>(); - for (int i = 0; i < randomInt(10); i++) { - randomTasks.add(TaskInfoTests.randomTaskInfo()); - } - return randomTasks; - } - - @Override - protected Predicate 
getRandomFieldsExcludeFilter() { - // status and headers hold arbitrary content, we can't inject random fields in them - return field -> field.endsWith("status") || field.endsWith("headers"); - } - - @Override - protected void assertEqualInstances(CancelTasksResponseWrapper expectedInstanceWrapper, CancelTasksResponseWrapper newInstanceWrapper) { - final var expectedInstance = expectedInstanceWrapper.in(); - final var newInstance = newInstanceWrapper.in(); - assertNotSame(expectedInstance, newInstance); - assertThat(newInstance.getTasks(), equalTo(expectedInstance.getTasks())); - ListTasksResponseTests.assertOnNodeFailures(newInstance.getNodeFailures(), expectedInstance.getNodeFailures()); - ListTasksResponseTests.assertOnTaskFailures(newInstance.getTaskFailures(), expectedInstance.getTaskFailures()); - } - - @Override - protected CancelTasksResponseWrapper doParseInstance(XContentParser parser) { - return new CancelTasksResponseWrapper(CancelTasksResponse.fromXContent(parser)); - } - - @Override - protected boolean supportsUnknownFields() { - return true; - } - - /** - * Test parsing {@link ListTasksResponse} with inner failures as they don't support asserting on xcontent equivalence, given that - * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} - * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. - */ - public void testFromXContentWithFailures() throws IOException { - Supplier instanceSupplier = CancelTasksResponseTests::createTestInstanceWithFailures; - // with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, - // but that does not bother our assertions, as we only want to test that we don't break. 
- boolean supportsUnknownFields = true; - // exceptions are not of the same type whenever parsed back - boolean assertToXContentEquivalence = false; - AbstractXContentTestCase.testFromXContent( - NUMBER_OF_TEST_RUNS, - instanceSupplier, - supportsUnknownFields, - Strings.EMPTY_ARRAY, - getRandomFieldsExcludeFilter(), - this::createParser, - this::doParseInstance, - this::assertEqualInstances, - assertToXContentEquivalence, - ToXContent.EMPTY_PARAMS - ); - } - - private static CancelTasksResponseWrapper createTestInstanceWithFailures() { - int numNodeFailures = randomIntBetween(0, 3); - List nodeFailures = new ArrayList<>(numNodeFailures); - for (int i = 0; i < numNodeFailures; i++) { - nodeFailures.add(new FailedNodeException(randomAlphaOfLength(5), "error message", new ConnectException())); - } - int numTaskFailures = randomIntBetween(0, 3); - List taskFailures = new ArrayList<>(numTaskFailures); - for (int i = 0; i < numTaskFailures; i++) { - taskFailures.add(new TaskOperationFailure(randomAlphaOfLength(5), randomLong(), new IllegalStateException())); - } - return new CancelTasksResponseWrapper(new CancelTasksResponse(randomTasks(), taskFailures, nodeFailures)); - } - -} diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 947b894124137..d4ea90ee6412a 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -147,8 +147,8 @@ public static MockTransportService startTransport( } SearchHits searchHits; if ("null_target".equals(request.preference())) { - searchHits = new SearchHits( - new SearchHit[] { new SearchHit(0) }, + searchHits = SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(0) }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1F ); diff --git 
a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java index 92b05ec9bf649..0b1ed05039a6d 100644 --- a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java +++ b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java @@ -53,7 +53,7 @@ public IndexError(StreamInput in) throws IOException { this.shardIds = in.readBoolean() ? in.readIntArray() : null; this.errorType = in.readEnum(ERROR_TYPE.class); this.message = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.stallTimeSeconds = in.readVInt(); } else { this.stallTimeSeconds = 0; @@ -69,7 +69,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeEnum(errorType); out.writeString(message); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeVInt(stallTimeSeconds); } } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java index b12bcd8b55880..a5ace3e357f90 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java @@ -11,9 +11,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import 
org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -466,7 +466,7 @@ private static void createSnapshotThatCanBeUsedDuringRecovery(String indexName) }, 60, TimeUnit.SECONDS); // Force merge to make sure that the resulting snapshot would contain the same index files as the safe commit - ForceMergeResponse forceMergeResponse = client().admin().indices().prepareForceMerge(indexName).setFlush(randomBoolean()).get(); + BroadcastResponse forceMergeResponse = client().admin().indices().prepareForceMerge(indexName).setFlush(randomBoolean()).get(); assertThat(forceMergeResponse.getTotalShards(), equalTo(forceMergeResponse.getSuccessfulShards())); // create repo diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index 2a1cba66f79f9..b6415eea7db2c 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -16,7 +16,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.tests.util.LuceneTestCase; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.network.InetAddresses; @@ -164,7 +164,7 @@ public final void testSnapshotWithLargeSegmentFiles() throws Exception { } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); @@ -191,7 +191,7 @@ public void testRequestStats() throws Exception { } flushAndRefresh(index); - ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); + BroadcastResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); diff --git a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java index a9f1ab7780f7f..526c2104e52ae 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/fetch/HighlighterTestCase.java @@ -73,7 +73,7 @@ protected final Map highlight(MapperService mapperServic Map> storedFields = storedFields(processor.storedFieldsSpec(), doc); Source source = Source.fromBytes(doc.source()); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext( - new SearchHit(0, "id"), + SearchHit.unpooled(0, "id"), ir.leaves().get(0), 0, storedFields, diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java index 9d151e690b071..5dc707e94bdd7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java @@ -11,7 +11,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -117,7 +116,7 @@ protected void cancelSearch(String action) { TaskInfo searchTask = listTasksResponse.getTasks().get(0); logger.info("Cancelling search"); - CancelTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setTargetTaskId(searchTask.taskId()).get(); + ListTasksResponse cancelTasksResponse = clusterAdmin().prepareCancelTasks().setTargetTaskId(searchTask.taskId()).get(); assertThat(cancelTasksResponse.getTasks(), hasSize(1)); assertThat(cancelTasksResponse.getTasks().get(0).taskId(), equalTo(searchTask.taskId())); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index 770c56f9c5952..4df1e745f3bf4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -32,7 +32,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public abstract class AbstractXContentTestCase extends ESTestCase { - protected static final int NUMBER_OF_TEST_RUNS = 20; + public static final int 
NUMBER_OF_TEST_RUNS = 20; public static XContentTester xContentTester( CheckedBiFunction createParser, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 175594ac8210f..65b28ad874431 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -34,10 +34,7 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.admin.indices.flush.FlushResponse; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; @@ -57,6 +54,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.client.internal.AdminClient; @@ -1478,9 +1476,9 @@ protected final DocWriteResponse index(String index, String id, String source) { * * @see #waitForRelocation() */ - protected final RefreshResponse refresh(String... indices) { + protected final BroadcastResponse refresh(String... 
indices) { waitForRelocation(); - RefreshResponse actionGet = indicesAdmin().prepareRefresh(indices) + BroadcastResponse actionGet = indicesAdmin().prepareRefresh(indices) .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_HIDDEN_FORBID_CLOSED) .get(); assertNoFailures(actionGet); @@ -1498,9 +1496,9 @@ protected final void flushAndRefresh(String... indices) { /** * Flush some or all indices in the cluster. */ - protected final FlushResponse flush(String... indices) { + protected final BroadcastResponse flush(String... indices) { waitForRelocation(); - FlushResponse actionGet = indicesAdmin().prepareFlush(indices).get(); + BroadcastResponse actionGet = indicesAdmin().prepareFlush(indices).get(); for (DefaultShardOperationFailedException failure : actionGet.getShardFailures()) { assertThat("unexpected flush failure " + failure.reason(), failure.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE)); } @@ -1510,9 +1508,9 @@ protected final FlushResponse flush(String... indices) { /** * Waits for all relocations and force merge all indices in the cluster to 1 segment. 
*/ - protected ForceMergeResponse forceMerge() { + protected BroadcastResponse forceMerge() { waitForRelocation(); - ForceMergeResponse actionGet = indicesAdmin().prepareForceMerge().setMaxNumSegments(1).get(); + BroadcastResponse actionGet = indicesAdmin().prepareForceMerge().setMaxNumSegments(1).get(); assertNoFailures(actionGet); return actionGet; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a2806663ff321..f9996bfc91204 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -32,8 +32,9 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -72,6 +73,7 @@ import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -129,7 +131,6 @@ import static org.elasticsearch.client.RestClient.IGNORE_RESPONSE_CODES_PARAM; import static 
org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.test.rest.TestFeatureService.ALL_FEATURES; import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -230,7 +231,22 @@ public enum ProductFeature { private static EnumSet availableFeatures; private static Set nodesVersions; - private static TestFeatureService testFeatureService = ALL_FEATURES; + + private static final TestFeatureService ALL_FEATURES = new TestFeatureService() { + @Override + public boolean clusterHasFeature(String featureId) { + return true; + } + + @Override + public Set getAllSupportedFeatures() { + throw new UnsupportedOperationException( + "Only available to properly initialized TestFeatureService. See ESRestTestCase#createTestFeatureService" + ); + } + }; + + protected static TestFeatureService testFeatureService = ALL_FEATURES; protected static Set getCachedNodesVersions() { assert nodesVersions != null; @@ -1265,15 +1281,33 @@ protected void refreshAllIndices() throws IOException { client().performRequest(refreshRequest); } - protected static RefreshResponse refresh(String index) throws IOException { + protected static BroadcastResponse refresh(String index) throws IOException { return refresh(client(), index); } - protected static RefreshResponse refresh(RestClient client, String index) throws IOException { + private static final ConstructingObjectParser BROADCAST_RESPONSE_PARSER = new ConstructingObjectParser<>( + "broadcast_response", + true, + arg -> { + BaseBroadcastResponse response = (BaseBroadcastResponse) arg[0]; + return new BroadcastResponse( + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()) + ); + } + ); + + static { + 
BaseBroadcastResponse.declareBroadcastFields(BROADCAST_RESPONSE_PARSER); + } + + protected static BroadcastResponse refresh(RestClient client, String index) throws IOException { Request refreshRequest = new Request("POST", "/" + index + "/_refresh"); Response response = client.performRequest(refreshRequest); try (var parser = responseAsParser(response)) { - return RefreshResponse.fromXContent(parser); + return BROADCAST_RESPONSE_PARSER.apply(parser, null); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java index a73c43f4fc46a..c8647f4e9c43b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestFeatureService.java @@ -9,6 +9,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.Version; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureData; import org.elasticsearch.features.FeatureSpecification; @@ -22,6 +23,7 @@ class ESRestTestFeatureService implements TestFeatureService { private final Predicate historicalFeaturesPredicate; private final Set clusterStateFeatures; + private final Set allSupportedFeatures; ESRestTestFeatureService( List specs, @@ -31,6 +33,12 @@ class ESRestTestFeatureService implements TestFeatureService { var minNodeVersion = nodeVersions.stream().min(Comparator.naturalOrder()); var featureData = FeatureData.createFromSpecifications(specs); var historicalFeatures = featureData.getHistoricalFeatures(); + Set allHistoricalFeatures = historicalFeatures.lastEntry() == null ? Set.of() : historicalFeatures.lastEntry().getValue(); + + this.allSupportedFeatures = Sets.union(clusterStateFeatures, minNodeVersion.>map(v -> { + var historicalFeaturesForVersion = historicalFeatures.floorEntry(v); + return historicalFeaturesForVersion == null ? 
Set.of() : historicalFeaturesForVersion.getValue(); + }).orElse(allHistoricalFeatures)); this.historicalFeaturesPredicate = minNodeVersion.>map( v -> featureId -> hasHistoricalFeature(historicalFeatures, v, featureId) @@ -43,10 +51,16 @@ private static boolean hasHistoricalFeature(NavigableMap> h return features != null && features.getValue().contains(featureId); } + @Override public boolean clusterHasFeature(String featureId) { if (clusterStateFeatures.contains(featureId)) { return true; } return historicalFeaturesPredicate.test(featureId); } + + @Override + public Set getAllSupportedFeatures() { + return allSupportedFeatures; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index fcd2f781ec58d..ca7684e60d281 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -57,10 +57,10 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); // QA - rolling upgrade tests + public static final NodeFeature DESIRED_NODE_API_SUPPORTED = new NodeFeature("desired_node_supported"); public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); public static final NodeFeature SECURITY_BULK_UPDATE_API_KEY = new NodeFeature("security.api_key_bulk_update"); @UpdateForV9 - public static final NodeFeature WATCHES_VERSION_IN_META = new NodeFeature("watcher.version_in_meta"); @UpdateForV9 public static final NodeFeature SECURITY_ROLE_DESCRIPTORS_OPTIONAL = new NodeFeature("security.role_descriptors_optional"); @@ -76,6 +76,27 @@ public class RestTestLegacyFeatures implements FeatureSpecification { @UpdateForV9 public static final NodeFeature 
ML_ANALYTICS_MAPPINGS = new NodeFeature("ml.analytics_mappings"); + public static final NodeFeature TSDB_NEW_INDEX_FORMAT = new NodeFeature("indices.tsdb_new_format"); + public static final NodeFeature TSDB_GENERALLY_AVAILABLE = new NodeFeature("indices.tsdb_supported"); + + /* + * A composable index template with no template defined in the body is mistakenly always assumed to not be a time series template. + * Fixed in #98840 + */ + public static final NodeFeature TSDB_EMPTY_TEMPLATE_FIXED = new NodeFeature("indices.tsdb_empty_composable_template_fixed"); + public static final NodeFeature SYNTHETIC_SOURCE_SUPPORTED = new NodeFeature("indices.synthetic_source"); + + public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_SUPPORTED = new NodeFeature("allocator.desired_balance"); + + /* + * Cancel shard allocation command is broken for initial desired balance versions + * and might allocate shard on the node where it is not supposed to be. This + * is fixed by https://github.com/elastic/elasticsearch/pull/93635. 
+ */ + public static final NodeFeature DESIRED_BALANCED_ALLOCATOR_FIXED = new NodeFeature("allocator.desired_balance_fixed"); + public static final NodeFeature INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED = new NodeFeature("settings.indexing_slowlog_level_removed"); + public static final NodeFeature DEPRECATION_WARNINGS_LEAK_FIXED = new NodeFeature("deprecation_warnings_leak_fixed"); + // YAML public static final NodeFeature REST_ELASTIC_PRODUCT_HEADER_PRESENT = new NodeFeature("action.rest.product_header_present"); @@ -103,7 +124,16 @@ public Map getHistoricalFeatures() { entry(TRANSFORM_NEW_API_ENDPOINT, Version.V_7_5_0), entry(ML_INDICES_HIDDEN, Version.V_7_7_0), entry(ML_ANALYTICS_MAPPINGS, Version.V_7_3_0), - entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1) + entry(REST_ELASTIC_PRODUCT_HEADER_PRESENT, Version.V_8_0_1), + entry(DESIRED_NODE_API_SUPPORTED, Version.V_8_1_0), + entry(TSDB_NEW_INDEX_FORMAT, Version.V_8_2_0), + entry(SYNTHETIC_SOURCE_SUPPORTED, Version.V_8_4_0), + entry(DESIRED_BALANCED_ALLOCATOR_SUPPORTED, Version.V_8_6_0), + entry(DESIRED_BALANCED_ALLOCATOR_FIXED, Version.V_8_7_1), + entry(TSDB_GENERALLY_AVAILABLE, Version.V_8_7_0), + entry(TSDB_EMPTY_TEMPLATE_FIXED, Version.V_8_11_0), + entry(INDEXING_SLOWLOG_LEVEL_SETTING_REMOVED, Version.V_8_0_0), + entry(DEPRECATION_WARNINGS_LEAK_FIXED, Version.V_7_17_9) ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java index 9de1fcf631520..332a00ce895a0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java @@ -8,8 +8,10 @@ package org.elasticsearch.test.rest; +import java.util.Set; + public interface TestFeatureService { boolean clusterHasFeature(String featureId); - TestFeatureService ALL_FEATURES = ignored -> true; + Set 
getAllSupportedFeatures(); } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java index 6e8397c816b3b..94b80fcc3fab3 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java @@ -29,6 +29,11 @@ private static class MockTestFeatureService implements TestFeatureService { public boolean clusterHasFeature(String featureId) { return true; } + + @Override + public Set getAllSupportedFeatures() { + return Set.of(); + } } public void testHeadersSupportStashedValueReplacement() throws IOException { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java index b0e8b8ae05b51..61917220f10d1 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/boxplot/BoxplotAggregationBuilder.java @@ -83,7 +83,7 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu public BoxplotAggregationBuilder(StreamInput in) throws IOException { super(in); compression = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { executionHint = in.readOptionalWriteable(TDigestExecutionHint::readFrom); } else { executionHint = TDigestExecutionHint.HIGH_ACCURACY; @@ -98,7 +98,7 @@ public Set metricNames() { @Override protected void innerWriteTo(StreamOutput out) throws IOException { 
out.writeDouble(compression); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeOptionalWriteable(executionHint); } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java index f3af195bc6fa1..dc4b096f3a08e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalResetTrackingRate.java @@ -63,7 +63,7 @@ public InternalResetTrackingRate(StreamInput in) throws IOException { this.startTime = in.readLong(); this.endTime = in.readLong(); this.resetCompensation = in.readDouble(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.rateUnit = Rounding.DateTimeUnit.resolve(in.readByte()); } else { this.rateUnit = Rounding.DateTimeUnit.SECOND_OF_MINUTE; @@ -82,7 +82,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeLong(startTime); out.writeLong(endTime); out.writeDouble(resetCompensation); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020) && rateUnit != null) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) && rateUnit != null) { out.writeByte(rateUnit.getId()); } else { out.writeByte(Rounding.DateTimeUnit.SECOND_OF_MINUTE.getId()); diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index f528d99133756..6ec287fe2b980 100644 --- 
a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.apmdata; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; @@ -19,7 +21,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.yaml.YamlXContent; import org.elasticsearch.xpack.core.ClientHelper; -import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; import org.elasticsearch.xpack.core.template.IngestPipelineConfig; @@ -37,12 +38,14 @@ * Creates all index templates and ingest pipelines that are required for using Elastic APM. */ public class APMIndexTemplateRegistry extends IndexTemplateRegistry { + private static final Logger logger = LogManager.getLogger(APMIndexTemplateRegistry.class); + private final int version; private final Map componentTemplates; private final Map composableIndexTemplates; private final List ingestPipelines; - private final boolean enabled; + private volatile boolean enabled; @SuppressWarnings("unchecked") public APMIndexTemplateRegistry( @@ -75,8 +78,6 @@ public APMIndexTemplateRegistry( Map.Entry> pipelineConfig = map.entrySet().iterator().next(); return loadIngestPipeline(pipelineConfig.getKey(), version, (List) pipelineConfig.getValue().get("dependencies")); }).collect(Collectors.toList()); - - enabled = XPackSettings.APM_DATA_ENABLED.get(nodeSettings); } catch (IOException e) { throw new RuntimeException(e); } @@ -86,6 +87,11 @@ public int getVersion() { return version; } + void setEnabled(boolean enabled) { + logger.info("APM index template registry is {}", enabled ? 
"enabled" : "disabled"); + this.enabled = enabled; + } + public boolean isEnabled() { return enabled; } diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java index 7acf3a3c972da..f905c17c04b4c 100644 --- a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMPlugin.java @@ -10,36 +10,62 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.core.XPackSettings; import java.util.Collection; +import java.util.Collections; import java.util.List; public class APMPlugin extends Plugin implements ActionPlugin { private static final Logger logger = LogManager.getLogger(APMPlugin.class); - private final SetOnce registry = new SetOnce<>(); + final SetOnce registry = new SetOnce<>(); + + private final boolean enabled; + + // APM_DATA_REGISTRY_ENABLED controls enabling the index template registry. + // + // This setting will be ignored if the plugin is disabled. + static final Setting APM_DATA_REGISTRY_ENABLED = Setting.boolSetting( + "xpack.apm_data.registry.enabled", + true, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public APMPlugin(Settings settings) { + this.enabled = XPackSettings.APM_DATA_ENABLED.get(settings); + } @Override public Collection createComponents(PluginServices services) { + logger.info("APM ingest plugin is {}", enabled ? 
"enabled" : "disabled"); + Settings settings = services.environment().settings(); + ClusterService clusterService = services.clusterService(); registry.set( - new APMIndexTemplateRegistry( - services.environment().settings(), - services.clusterService(), - services.threadPool(), - services.client(), - services.xContentRegistry() - ) + new APMIndexTemplateRegistry(settings, clusterService, services.threadPool(), services.client(), services.xContentRegistry()) ); - APMIndexTemplateRegistry registryInstance = registry.get(); - logger.info("APM ingest plugin is {}", registryInstance.isEnabled() ? "enabled" : "disabled"); - registryInstance.initialize(); - return List.of(registryInstance); + if (enabled) { + APMIndexTemplateRegistry registryInstance = registry.get(); + registryInstance.setEnabled(APM_DATA_REGISTRY_ENABLED.get(settings)); + clusterService.getClusterSettings().addSettingsUpdateConsumer(APM_DATA_REGISTRY_ENABLED, registryInstance::setEnabled); + registryInstance.initialize(); + } + return Collections.emptyList(); } @Override public void close() { registry.get().close(); } + + @Override + public List> getSettings() { + return List.of(APM_DATA_REGISTRY_ENABLED); + } } diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml index 0ebbb99a1e379..3d9c1490e5a86 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.app@template.yaml @@ -11,12 +11,12 @@ composed_of: - apm@mappings - apm@settings - apm-10d@lifecycle -- apm@custom +- logs@custom - logs-apm.app@custom - ecs@mappings ignore_missing_component_templates: +- logs@custom - logs-apm.app@custom -- apm@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml index 831f7cc404415..4adcf125b2df9 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/logs-apm.error@template.yaml @@ -13,12 +13,12 @@ composed_of: - apm@settings - apm-10d@lifecycle - logs-apm.error@mappings -- apm@custom +- logs@custom - logs-apm.error@custom - ecs@mappings ignore_missing_component_templates: +- logs@custom - logs-apm.error@custom -- apm@custom template: mappings: properties: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml index bdd1fa363bcf4..c2233469110f8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.app@template.yaml @@ -13,11 +13,11 @@ composed_of: - apm-90d@lifecycle - metrics-apm@mappings - metrics-apm@settings -- apm@custom +- metrics@custom - metrics-apm.app@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- metrics@custom - metrics-apm.app@custom template: settings: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml index 205784e22e685..3d6d05c58e780 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.internal@template.yaml @@ -14,11 +14,11 @@ composed_of: - apm-90d@lifecycle - metrics-apm@mappings - metrics-apm@settings -- apm@custom +- metrics@custom - metrics-apm.internal@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- metrics@custom - 
metrics-apm.internal@custom template: settings: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml index 6279e044fbfcf..f234b60b1a6ec 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml index 10e4ca5b39a52..aa4f212532e56 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.1m@custom template: settings: index: diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml index dbac0d0d17d89..9b1a26486f482 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_destination.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_destination@mappings -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_destination@custom +- metrics@custom +- metrics-apm.service_destination.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml index af99e419d4a56..c37ec93651d9d 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml index 29c28953d6b40..3a99bc8472c66 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml index bdbd4900df3bb..d829967f7eddf 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_summary.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_summary@mappings -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_summary@custom +- metrics@custom +- metrics-apm.service_summary.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml index 8b4e88391a475..bc21b35d4777f 100644 --- 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml index 811067f8e6f30..87a1e254baea7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml index db28b7c56aaab..b45ce0ec0fad7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml +++ 
b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.service_transaction.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.service_transaction@mappings -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.service_transaction@custom +- metrics@custom +- metrics-apm.service_transaction.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml index 548f73656fda4..51d3c90cb4af8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.10m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.10m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.10m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml index 6206e7c126c48..8825a93db28dc 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.1m@template.yaml @@ -15,12 +15,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings 
-- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.1m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.1m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml index 4ad00aecf23a5..e6657fbfe5d28 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/metrics-apm.transaction.60m@template.yaml @@ -16,12 +16,12 @@ composed_of: - metrics-apm@mappings - metrics-apm@settings - metrics-apm.transaction@mappings -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.60m@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom -- metrics-apm.transaction@custom +- metrics@custom +- metrics-apm.transaction.60m@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml index 174faf432eb6e..174aec8c5515a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.rum@template.yaml @@ -14,14 +14,12 @@ composed_of: - apm-90d@lifecycle - traces-apm@mappings - traces-apm.rum@mappings -- apm@custom -- traces-apm@custom +- traces@custom - traces-apm.rum@custom - ecs@mappings ignore_missing_component_templates: +- traces@custom - traces-apm.rum@custom -- traces-apm@custom -- apm@custom template: settings: index: diff --git 
a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml index 8c65c69bc3afa..a39d10897a2ed 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm.sampled@template.yaml @@ -11,10 +11,12 @@ composed_of: - traces@mappings - apm@mappings - apm@settings -- apm@custom +- traces@custom +- traces-apm.sampled@custom - ecs@mappings ignore_missing_component_templates: -- apm@custom +- traces@custom +- traces-apm.sampled@custom template: lifecycle: data_retention: 1h diff --git a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml index fb6670a7f7143..de9c47dfd3f1b 100644 --- a/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/index-templates/traces-apm@template.yaml @@ -13,12 +13,12 @@ composed_of: - apm@settings - apm-10d@lifecycle - traces-apm@mappings -- apm@custom +- traces@custom - traces-apm@custom - ecs@mappings ignore_missing_component_templates: +- traces@custom - traces-apm@custom -- apm@custom template: settings: index: diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index 7dcd6fdd807e4..4f6a5b58ff38d 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; 
import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.features.FeatureService; import org.elasticsearch.ingest.IngestMetadata; @@ -55,12 +56,15 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.xpack.core.XPackSettings.APM_DATA_ENABLED; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -75,20 +79,28 @@ public class APMIndexTemplateRegistryTests extends ESTestCase { @Before public void createRegistryAndClient() { + final ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Stream.concat(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), Set.of(APMPlugin.APM_DATA_REGISTRY_ENABLED).stream()) + .collect(Collectors.toSet()) + ); + threadPool = new TestThreadPool(this.getClass().getName()); client = new VerifyingClient(threadPool); - clusterService = ClusterServiceUtils.createClusterService(threadPool); + clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); FeatureService featureService = new FeatureService(List.of()); stackTemplateRegistryAccessor = new StackTemplateRegistryAccessor( new StackTemplateRegistry(Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY, featureService) ); + apmIndexTemplateRegistry = new APMIndexTemplateRegistry( - Settings.builder().put(APM_DATA_ENABLED.getKey(), true).build(), + 
Settings.EMPTY, clusterService, threadPool, client, NamedXContentRegistry.EMPTY ); + apmIndexTemplateRegistry.setEnabled(true); } @After @@ -111,6 +123,28 @@ public void testThatMissingMasterNodeDoesNothing() { apmIndexTemplateRegistry.clusterChanged(event); } + public void testThatDisablingRegistryDoesNothing() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + apmIndexTemplateRegistry.setEnabled(false); + assertThat(apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet(), hasSize(0)); + assertThat(apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet(), hasSize(0)); + assertThat(apmIndexTemplateRegistry.getIngestPipelines(), hasSize(0)); + + client.setVerifier((a, r, l) -> { + fail("if the registry is disabled nothing should happen"); + return null; + }); + ClusterChangedEvent event = createClusterChangedEvent(Map.of(), Map.of(), nodes); + apmIndexTemplateRegistry.clusterChanged(event); + + apmIndexTemplateRegistry.setEnabled(true); + assertThat(apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet(), not(hasSize(0))); + assertThat(apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet(), not(hasSize(0))); + assertThat(apmIndexTemplateRegistry.getIngestPipelines(), not(hasSize(0))); + } + public void testThatIndependentTemplatesAreAddedImmediatelyIfMissing() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); @@ -249,6 +283,48 @@ public void testIndexTemplates() throws Exception { assertThat(actualInstalledIngestPipelines.get(), equalTo(0)); } + public void testIndexTemplateConventions() throws Exception { + for (Map.Entry entry : apmIndexTemplateRegistry.getComposableTemplateConfigs().entrySet()) { + final String name = entry.getKey(); + final int atIndex 
= name.lastIndexOf('@'); + assertThat(atIndex, not(equalTo(-1))); + assertThat(name.substring(atIndex + 1), equalTo("template")); + + final String dataStreamType = name.substring(0, name.indexOf('-')); + assertThat(dataStreamType, isIn(List.of("logs", "metrics", "traces"))); + + final ComposableIndexTemplate template = entry.getValue(); + assertThat(template.indexPatterns().size(), equalTo(1)); + + final String namePrefix = name.substring(0, atIndex); + switch (namePrefix) { + case "logs-apm.app", "metrics-apm.app": + // These two data streams have a service-specific dataset. + assertThat(template.indexPatterns().get(0), equalTo(namePrefix + ".*-*")); + break; + default: + assertThat(template.indexPatterns().get(0), equalTo(namePrefix + "-*")); + break; + } + + // Each index template should be composed of the following optional component templates: + // @custom + // -@custom + final List optionalComponentTemplates = template.composedOf() + .stream() + .filter(t -> template.getIgnoreMissingComponentTemplates().contains(t)) + .toList(); + assertThat(optionalComponentTemplates, containsInAnyOrder(namePrefix + "@custom", dataStreamType + "@custom")); + + // There should be no required custom component templates. 
+ final List requiredCustomComponentTemplates = template.getRequiredComponentTemplates() + .stream() + .filter(t -> t.endsWith("@custom")) + .toList(); + assertThat(requiredCustomComponentTemplates, empty()); + } + } + private Map getIndependentComponentTemplateConfigs() { return apmIndexTemplateRegistry.getComponentTemplateConfigs().entrySet().stream().filter(template -> { Settings settings = template.getValue().template().settings(); diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java new file mode 100644 index 0000000000000..289852737393e --- /dev/null +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMPluginTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.apmdata; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.XPackSettings; +import org.junit.After; +import org.junit.Before; + +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class APMPluginTests extends ESTestCase { + private APMPlugin apmPlugin; + private ClusterService clusterService; + private ThreadPool threadPool; + + @Before + public void createPlugin() { + final ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + Stream.concat(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), Set.of(APMPlugin.APM_DATA_REGISTRY_ENABLED).stream()) + .collect(Collectors.toSet()) + ); + threadPool = new TestThreadPool(this.getClass().getName()); + clusterService = ClusterServiceUtils.createClusterService(threadPool, clusterSettings); + apmPlugin = new APMPlugin(Settings.builder().put(XPackSettings.APM_DATA_ENABLED.getKey(), true).build()); + } + + private void createComponents() { + Environment mockEnvironment = mock(Environment.class); + when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.clusterService()).thenReturn(clusterService); + when(services.threadPool()).thenReturn(threadPool); + when(services.environment()).thenReturn(mockEnvironment); + apmPlugin.createComponents(services); + } + + @After + @Override + public void tearDown() 
throws Exception { + super.tearDown(); + apmPlugin.close(); + threadPool.shutdownNow(); + } + + public void testRegistryEnabledSetting() throws Exception { + createComponents(); + + // By default, the registry is enabled. + assertTrue(apmPlugin.registry.get().isEnabled()); + + // The registry can be disabled/enabled dynamically. + clusterService.getClusterSettings() + .applySettings(Settings.builder().put(APMPlugin.APM_DATA_REGISTRY_ENABLED.getKey(), false).build()); + assertFalse(apmPlugin.registry.get().isEnabled()); + } + + public void testDisablingPluginDisablesRegistry() throws Exception { + apmPlugin = new APMPlugin(Settings.builder().put(XPackSettings.APM_DATA_ENABLED.getKey(), false).build()); + createComponents(); + + // The plugin is disabled, so the registry is disabled too. + assertFalse(apmPlugin.registry.get().isEnabled()); + + // The registry can not be enabled dynamically when the plugin is disabled. + clusterService.getClusterSettings() + .applySettings(Settings.builder().put(APMPlugin.APM_DATA_REGISTRY_ENABLED.getKey(), true).build()); + assertFalse(apmPlugin.registry.get().isEnabled()); + } +} diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml new file mode 100644 index 0000000000000..62b36926d01dc --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_custom_templates.yml @@ -0,0 +1,76 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "metrics@custom" + body: + template: + mappings: + properties: + custom_field1: + type: keyword + meta: + source: metrics@custom + custom_field2: + type: keyword + meta: + source: metrics@custom + + - do: + cluster.put_component_template: + name: "metrics-apm.app@custom" + body: + template: + mappings: + properties: + custom_field2: + type: keyword + meta: + 
source: metrics-apm.app@custom + custom_field3: + type: keyword + meta: + source: metrics-apm.app@custom + +--- +"Test metrics @custom component templates": + - do: + indices.create_data_stream: + name: metrics-apm.app.svc1-testing + - do: + # Wait for cluster state changes to be applied before + # querying field mappings. + cluster.health: + wait_for_events: languid + - do: + indices.get_field_mapping: + index: metrics-apm.app.svc1-testing + fields: custom_field* + - set: {_arbitrary_key_: index} + - match: + $body.$index.mappings: + custom_field1: + full_name: custom_field1 + mapping: + custom_field1: + type: keyword + meta: + source: metrics@custom + custom_field2: + full_name: custom_field2 + mapping: + custom_field2: + type: keyword + meta: + source: metrics-apm.app@custom + custom_field3: + full_name: custom_field3 + mapping: + custom_field3: + type: keyword + meta: + source: metrics-apm.app@custom diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index 1819ad7960006..88ae09fbcdc99 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -192,7 +192,7 @@ private SearchHit[] getSearchHits(String asyncId, String user) throws IOExceptio ) ).getSearchResponse(); try { - return searchResponse.getHits().getHits(); + return searchResponse.getHits().asUnpooled().getHits(); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java index 
bb3dc5b866b54..3605d6365f867 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/CrossClusterAsyncSearchIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; @@ -1318,7 +1317,7 @@ public void testCancelViaTasksAPI() throws Exception { SearchListenerPlugin.waitSearchStarted(); - ActionFuture cancelFuture; + ActionFuture cancelFuture; try { ListTasksResponse listTasksResponse = client(LOCAL_CLUSTER).admin() .cluster() diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java index c167d74eb78d2..04b0b11ad38d4 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java @@ -13,7 +13,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.search.CCSSingleCoordinatorSearchProgressListener; import org.elasticsearch.action.search.SearchProgressActionListener; import 
org.elasticsearch.action.search.SearchRequest; @@ -155,7 +155,7 @@ public void cancelTask(Runnable runnable, String reason) { CancelTasksRequest req = new CancelTasksRequest().setTargetTaskId(searchId.getTaskId()).setReason(reason); client.admin().cluster().cancelTasks(req, new ActionListener<>() { @Override - public void onResponse(CancelTasksResponse cancelTasksResponse) { + public void onResponse(ListTasksResponse cancelTasksResponse) { runnable.run(); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 7eb3cca18efd0..6bb3bd5fe14f6 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -980,7 +980,7 @@ public static class ReactiveReason implements AutoscalingDeciderResult.Reason { static final int MAX_AMOUNT_OF_SHARDS = 512; private static final TransportVersion SHARD_IDS_OUTPUT_VERSION = TransportVersions.V_8_4_0; - private static final TransportVersion UNASSIGNED_NODE_DECISIONS_OUTPUT_VERSION = TransportVersions.V_8_500_020; + private static final TransportVersion UNASSIGNED_NODE_DECISIONS_OUTPUT_VERSION = TransportVersions.V_8_9_X; private final String reason; private final long unassigned; diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 7e0e2d1493417..4a3a92aa80bc8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -137,7 +137,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E public static final 
String CCR_CUSTOM_METADATA_REMOTE_CLUSTER_NAME_KEY = "remote_cluster_name"; public static final String REQUESTED_OPS_MISSING_METADATA_KEY = "es.requested_operations_missing"; - public static final TransportVersion TRANSPORT_VERSION_ACTION_WITH_SHARD_ID = TransportVersions.V_8_500_020; + public static final TransportVersion TRANSPORT_VERSION_ACTION_WITH_SHARD_ID = TransportVersions.V_8_9_X; private final boolean enabled; private final Settings settings; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index ea4bc8c92047a..4ce64bc41d6a1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -15,12 +15,12 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.client.internal.Client; @@ -461,8 +461,8 @@ protected final Index resolveFollowerIndex(String index) { return new Index(index, uuid); } - protected final RefreshResponse refresh(Client client, String... 
indices) { - RefreshResponse actionGet = client.admin().indices().prepareRefresh(indices).get(); + protected final BroadcastResponse refresh(Client client, String... indices) { + BroadcastResponse actionGet = client.admin().indices().prepareRefresh(indices).get(); assertNoFailures(actionGet); return actionGet; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java index dc5169648e0cd..45b9d557b72b3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java @@ -23,7 +23,7 @@ public class EnterpriseSearchFeatureSetUsage extends XPackFeatureSet.Usage { static final TransportVersion BEHAVIORAL_ANALYTICS_TRANSPORT_VERSION = TransportVersions.V_8_8_1; - static final TransportVersion QUERY_RULES_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + static final TransportVersion QUERY_RULES_TRANSPORT_VERSION = TransportVersions.V_8_10_X; public static final String SEARCH_APPLICATIONS = "search_applications"; public static final String ANALYTICS_COLLECTIONS = "analytics_collections"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java index edac3498ca4e4..91cce4126d3a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datastreams/DataStreamLifecycleFeatureSetUsage.java @@ -48,7 +48,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public TransportVersion 
getMinimalSupportedVersion() { - return TransportVersions.V_8_500_020; + return TransportVersions.V_8_9_X; } @Override @@ -112,7 +112,7 @@ public LifecycleStats( } public static LifecycleStats read(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { return new LifecycleStats(in.readVLong(), in.readVLong(), in.readVLong(), in.readDouble(), in.readBoolean()); } else { return INITIAL; @@ -121,7 +121,7 @@ public static LifecycleStats read(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeVLong(dataStreamsWithLifecyclesCount); out.writeVLong(minRetentionMillis); out.writeVLong(maxRetentionMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java index ef93ab914f08f..dcaf5057e9d43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java @@ -69,7 +69,7 @@ public Request() {} public Request(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) && in.readBoolean()) { this.indexStartTimeMillis = in.readVLong(); this.indexEndTimeMillis = in.readVLong(); } else { @@ -132,7 +132,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(true); out.writeVLong(indexStartTimeMillis); out.writeVLong(indexEndTimeMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java index 2700ed844d063..8d1d4aec6e7c2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java @@ -144,7 +144,7 @@ public DownsampleShardStatus(StreamInput in) throws IOException { numSent = in.readLong(); numIndexed = in.readLong(); numFailed = in.readLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) && in.readBoolean()) { totalShardDocCount = in.readVLong(); lastSourceTimestamp = in.readVLong(); lastTargetTimestamp = in.readVLong(); @@ -254,7 +254,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(numSent); out.writeLong(numIndexed); out.writeLong(numFailed); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(true); out.writeVLong(totalShardDocCount); out.writeVLong(lastSourceTimestamp); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java index 818b45c2b5d00..59ff38b317327 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java 
@@ -91,7 +91,7 @@ public DownsampleAction(final DateHistogramInterval fixedInterval, final TimeVal public DownsampleAction(StreamInput in) throws IOException { this( new DateHistogramInterval(in), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X) ? TimeValue.parseTimeValue(in.readString(), WAIT_TIMEOUT_FIELD.getPreferredName()) : DEFAULT_WAIT_TIMEOUT ); @@ -100,7 +100,7 @@ public DownsampleAction(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { fixedInterval.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeString(waitTimeout.getStringRep()); } else { out.writeString(DEFAULT_WAIT_TIMEOUT.getStringRep()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index 12fba46e40689..c316e130ecb81 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -79,7 +79,7 @@ public Request(StreamInput in) throws IOException { advanceTime = in.readOptionalString(); skipTime = in.readOptionalString(); waitForNormalization = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { refreshRequired = in.readBoolean(); } } @@ -93,7 +93,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(advanceTime); out.writeOptionalString(skipTime); out.writeBoolean(waitForNormalization); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(refreshRequired); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index e8b0041875b07..d819f7d846843 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -334,7 +334,7 @@ public boolean equals(Object obj) { } } - private QueryPage jobsStats; + private final QueryPage jobsStats; public Response(QueryPage jobsStats) { super(Collections.emptyList(), Collections.emptyList()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index 5341efeec1094..6f64c41c8dee9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -91,7 +91,7 @@ public Request(StreamInput in) throws IOException { this.part = in.readVInt(); this.totalDefinitionLength = in.readVLong(); this.totalParts = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -148,7 +148,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(part); out.writeVLong(totalDefinitionLength); out.writeVInt(totalParts); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(allowOverwriting); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index c153cbc2c039b..ed988f952bc97 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -86,12 +86,12 @@ public Request(StreamInput in) throws IOException { } else { this.merges = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.scores = in.readCollectionAsList(StreamInput::readDouble); } else { this.scores = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -136,10 +136,10 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { out.writeStringCollection(merges); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeCollection(scores, StreamOutput::writeDouble); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeBoolean(allowOverwriting); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index 883c94093a2c5..2254959242eab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -66,7 +66,7 @@ public FlushAcknowledgement(String id, Instant lastFinalizedBucketEnd, Boolean r public FlushAcknowledgement(StreamInput in) throws IOException { id = in.readString(); lastFinalizedBucketEnd = in.readOptionalInstant(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { refreshRequired = in.readBoolean(); } else { refreshRequired = true; @@ -77,7 +77,7 @@ public FlushAcknowledgement(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeOptionalInstant(lastFinalizedBucketEnd); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(refreshRequired); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java index 7596fe75b4173..10b7730b58c9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java @@ -134,12 +134,12 @@ public AsyncStatusResponse(StreamInput in) throws IOException { this.skippedShards = in.readVInt(); this.failedShards = in.readVInt(); this.completionStatus = (this.isRunning == false) ? 
RestStatus.readFrom(in) : null; - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { this.clusters = in.readOptionalWriteable(SearchResponse.Clusters::new); } else { this.clusters = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { this.completionTimeMillis = in.readOptionalVLong(); } else { this.completionTimeMillis = null; @@ -160,11 +160,11 @@ public void writeTo(StreamOutput out) throws IOException { if (isRunning == false) { RestStatus.writeTo(out, completionStatus); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { // optional since only CCS uses is; it is null for local-only searches out.writeOptionalWriteable(clusters); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { out.writeOptionalVLong(completionTimeMillis); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java index fbc08a0dee8aa..bdb721df2ffd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; /** * A collection of actions types for the Security plugin that need to be available in xpack.core.security and thus cannot be stored @@ -20,4 +21,6 @@ public final class ActionTypes { public static final ActionType 
RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION = ActionType.localOnly( "cluster:admin/xpack/security/remote_cluster_credentials/reload" ); + + public static final ActionType QUERY_USER_ACTION = ActionType.localOnly("cluster:admin/xpack/security/user/query"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java index e57570ce7385b..5753fa3b4ad7a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/ApiKey.java @@ -46,7 +46,7 @@ */ public final class ApiKey implements ToXContentObject, Writeable { - public static final TransportVersion CROSS_CLUSTER_KEY_VERSION = TransportVersions.V_8_500_020; + public static final TransportVersion CROSS_CLUSTER_KEY_VERSION = TransportVersions.V_8_9_X; public enum Type { /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java index 71e0c98fb0012..a8b14795e2dd8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java @@ -26,7 +26,7 @@ */ public final class GetApiKeyRequest extends ActionRequest { - static TransportVersion API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + static TransportVersion API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_10_X; private final String realmName; private final String userName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java index 73ee4d1f27299..dfad1fe376706 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java @@ -20,7 +20,7 @@ public class AuthenticateResponse extends ActionResponse implements ToXContent { - public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_500_061; + public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_10_X; private final Authentication authentication; private final boolean operator; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java new file mode 100644 index 0000000000000..6db7e93b66eda --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request for the query Users API.
    + * Model for API requests to the query users API + */ +public final class QueryUserRequest extends ActionRequest { + + @Nullable + private final QueryBuilder queryBuilder; + @Nullable + private final Integer from; + @Nullable + private final Integer size; + @Nullable + private final List fieldSortBuilders; + @Nullable + private final SearchAfterBuilder searchAfterBuilder; + + public QueryUserRequest() { + this(null); + } + + public QueryUserRequest(QueryBuilder queryBuilder) { + this(queryBuilder, null, null, null, null); + } + + public QueryUserRequest( + @Nullable QueryBuilder queryBuilder, + @Nullable Integer from, + @Nullable Integer size, + @Nullable List fieldSortBuilders, + @Nullable SearchAfterBuilder searchAfterBuilder + ) { + this.queryBuilder = queryBuilder; + this.from = from; + this.size = size; + this.fieldSortBuilders = fieldSortBuilders; + this.searchAfterBuilder = searchAfterBuilder; + } + + public QueryBuilder getQueryBuilder() { + return queryBuilder; + } + + public Integer getFrom() { + return from; + } + + public Integer getSize() { + return size; + } + + public List getFieldSortBuilders() { + return fieldSortBuilders; + } + + public SearchAfterBuilder getSearchAfterBuilder() { + return searchAfterBuilder; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (from != null && from < 0) { + validationException = addValidationError("[from] parameter cannot be negative but was [" + from + "]", validationException); + } + if (size != null && size < 0) { + validationException = addValidationError("[size] parameter cannot be negative but was [" + size + "]", validationException); + } + return validationException; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java new file mode 100644 index 0000000000000..57d156cf05ca0 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; + +/** + * Response for the query Users API.
    + * Model used to serialize information about the Users that were found. + */ +public final class QueryUserResponse extends ActionResponse implements ToXContentObject { + + private final long total; + private final Item[] items; + + public QueryUserResponse(long total, Collection items) { + this.total = total; + Objects.requireNonNull(items, "items must be provided"); + this.items = items.toArray(new Item[0]); + } + + public static QueryUserResponse emptyResponse() { + return new QueryUserResponse(0, Collections.emptyList()); + } + + public long getTotal() { + return total; + } + + public Item[] getItems() { + return items; + } + + public int getCount() { + return items.length; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject().field("total", total).field("count", items.length).array("users", (Object[]) items); + return builder.endObject(); + } + + @Override + public String toString() { + return "QueryUsersResponse{" + "total=" + total + ", items=" + Arrays.toString(items) + '}'; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + TransportAction.localOnly(); + } + + public record Item(User user, @Nullable Object[] sortValues) implements ToXContentObject { + + @Override + public Object[] sortValues() { + return sortValues; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + user.innerToXContent(builder); + if (sortValues != null && sortValues.length > 0) { + builder.array("_sort", sortValues); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "Item{" + "user=" + user + ", sortValues=" + Arrays.toString(sortValues) + '}'; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index f39eca877432c..2857cbfd1bdd2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -52,7 +52,7 @@ */ public class RoleDescriptor implements ToXContentObject, Writeable { - public static final TransportVersion WORKFLOWS_RESTRICTION_VERSION = TransportVersions.V_8_500_020; + public static final TransportVersion WORKFLOWS_RESTRICTION_VERSION = TransportVersions.V_8_9_X; public static final String ROLE_TYPE = "role"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index f93599cdb98cc..ba6bca802070a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -29,6 +29,7 @@ import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; import org.elasticsearch.xpack.core.ilm.action.GetStatusAction; import org.elasticsearch.xpack.core.ilm.action.ILMActions; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GrantApiKeyAction; @@ -234,6 +235,7 @@ public class ClusterPrivilegeResolver { GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME + "*", GetUsersAction.NAME, + ActionTypes.QUERY_USER_ACTION.name(), GetUserPrivilegesAction.NAME, // normally authorized under the 
"same-user" authz check, but added here for uniformity HasPrivilegesAction.NAME, GetSecuritySettingsAction.NAME diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java index 962e789cac7d6..b16983c6a7ac6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ForceMergeStepTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -70,13 +70,13 @@ public void testPerformActionComplete() throws Exception { Step.StepKey stepKey = randomStepKey(); StepKey nextStepKey = randomStepKey(); int maxNumSegments = randomIntBetween(1, 10); - ForceMergeResponse forceMergeResponse = Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.OK); Mockito.doAnswer(invocationOnMock -> { ForceMergeRequest request = (ForceMergeRequest) invocationOnMock.getArguments()[0]; assertThat(request.maxNumSegments(), equalTo(maxNumSegments)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; 
listener.onResponse(forceMergeResponse); return null; }).when(indicesClient).forceMerge(any(), any()); @@ -95,7 +95,7 @@ public void testPerformActionThrowsException() { Step.StepKey stepKey = randomStepKey(); StepKey nextStepKey = randomStepKey(); int maxNumSegments = randomIntBetween(1, 10); - ForceMergeResponse forceMergeResponse = Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.OK); Mockito.doAnswer(invocationOnMock -> { ForceMergeRequest request = (ForceMergeRequest) invocationOnMock.getArguments()[0]; @@ -103,7 +103,7 @@ public void testPerformActionThrowsException() { assertThat(request.indices()[0], equalTo(indexMetadata.getIndex().getName())); assertThat(request.maxNumSegments(), equalTo(maxNumSegments)); @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; listener.onFailure(exception); return null; }).when(indicesClient).forceMerge(any(), any()); @@ -126,7 +126,7 @@ public void testForcemergeFailsOnSomeShards() { .numberOfReplicas(randomIntBetween(0, 5)) .build(); Index index = indexMetadata.getIndex(); - ForceMergeResponse forceMergeResponse = Mockito.mock(ForceMergeResponse.class); + BroadcastResponse forceMergeResponse = Mockito.mock(BroadcastResponse.class); Mockito.when(forceMergeResponse.getTotalShards()).thenReturn(numberOfShards); Mockito.when(forceMergeResponse.getFailedShards()).thenReturn(numberOfShards - 1); Mockito.when(forceMergeResponse.getStatus()).thenReturn(RestStatus.BAD_REQUEST); @@ -143,7 +143,7 @@ public void testForcemergeFailsOnSomeShards() { Mockito.doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = (ActionListener) 
invocationOnMock.getArguments()[1]; listener.onResponse(forceMergeResponse); return null; }).when(indicesClient).forceMerge(any(), any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java index 92ba5d2ad4efb..db8ac28dd1b98 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ShrinkStepTests.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.core.ilm; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -100,7 +100,7 @@ public void testPerformAction() throws Exception { Mockito.doAnswer(invocation -> { ResizeRequest request = (ResizeRequest) invocation.getArguments()[0]; @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; assertThat(request.getSourceIndex(), equalTo(sourceIndexMetadata.getIndex().getName())); assertThat(request.getTargetIndexRequest().aliases(), equalTo(Collections.emptySet())); @@ -119,7 +119,7 @@ public void testPerformAction() throws Exception { ); } request.setMaxPrimaryShardSize(step.getMaxPrimaryShardSize()); - listener.onResponse(new ResizeResponse(true, true, sourceIndexMetadata.getIndex().getName())); + listener.onResponse(new CreateIndexResponse(true, true, sourceIndexMetadata.getIndex().getName())); return null; 
}).when(indicesClient).resizeIndex(Mockito.any(), Mockito.any()); @@ -181,8 +181,8 @@ public void testPerformActionIsCompleteForUnAckedRequests() throws Exception { Mockito.doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[1]; - listener.onResponse(new ResizeResponse(false, false, indexMetadata.getIndex().getName())); + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new CreateIndexResponse(false, false, indexMetadata.getIndex().getName())); return null; }).when(indicesClient).resizeIndex(Mockito.any(), Mockito.any()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java index a369219bd7c3c..6d85e90dc3108 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FlushJobActionRequestTests.java @@ -53,7 +53,7 @@ protected Writeable.Reader instanceReader() { @Override protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { - if (version.before(TransportVersions.V_8_500_020)) { + if (version.before(TransportVersions.V_8_9_X)) { instance.setRefreshRequired(true); } return instance; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java index ee304f966c9b4..7f37ff85f1fda 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java +++ 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java @@ -72,7 +72,7 @@ protected Writeable.Reader instanceReader() { @Override protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { - if (version.before(TransportVersions.V_8_500_061)) { + if (version.before(TransportVersions.V_8_10_X)) { return new Request( instance.getModelId(), instance.getDefinition(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java new file mode 100644 index 0000000000000..e7d8ef0b65e39 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.core.security.action.user; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.nullValue; + +public class QueryUserRequestTests extends ESTestCase { + public void testValidate() { + final QueryUserRequest request1 = new QueryUserRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request1.validate(), nullValue()); + + final QueryUserRequest request2 = new QueryUserRequest( + null, + randomIntBetween(Integer.MIN_VALUE, -1), + randomIntBetween(0, Integer.MAX_VALUE), + null, + null + ); + assertThat(request2.validate().getMessage(), containsString("[from] parameter cannot be negative")); + + final QueryUserRequest request3 = new QueryUserRequest( + null, + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(Integer.MIN_VALUE, -1), + null, + null + ); + assertThat(request3.validate().getMessage(), containsString("[size] parameter cannot be negative")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index bddc30b8d7b83..21827c4b9a373 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.core.enrich.action.ExecuteEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.GetEnrichPolicyAction; import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; import 
org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; @@ -281,6 +282,7 @@ public void testReadSecurityPrivilege() { GetServiceAccountAction.NAME, GetServiceAccountCredentialsAction.NAME, GetUsersAction.NAME, + ActionTypes.QUERY_USER_ACTION.name(), HasPrivilegesAction.NAME, GetUserPrivilegesAction.NAME, GetSecuritySettingsAction.NAME @@ -339,16 +341,11 @@ public void testManageUserProfilePrivilege() { "cluster:admin/xpack/security/role/get", "cluster:admin/xpack/security/role/delete" ); - verifyClusterActionDenied( - ClusterPrivilegeResolver.MANAGE_USER_PROFILE, - "cluster:admin/xpack/security/role/put", - "cluster:admin/xpack/security/role/get", - "cluster:admin/xpack/security/role/delete" - ); verifyClusterActionDenied( ClusterPrivilegeResolver.MANAGE_USER_PROFILE, "cluster:admin/xpack/security/user/put", "cluster:admin/xpack/security/user/get", + "cluster:admin/xpack/security/user/query", "cluster:admin/xpack/security/user/delete" ); verifyClusterActionDenied( diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java index 34b7d3c90b267..813dcc8c8d5a4 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java @@ -91,7 +91,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_061; + return TransportVersions.V_8_10_X; } @Override diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 5cceffd0f4818..f3bb43b9a3f38 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -17,12 +17,12 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.elasticsearch.action.downsample.DownsampleAction; import org.elasticsearch.action.downsample.DownsampleConfig; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; import org.elasticsearch.client.internal.Client; @@ -848,7 +848,7 @@ public void onFailure(Exception e) { /** * Updates the downsample target index metadata (task status) */ - class RefreshDownsampleIndexActionListener implements ActionListener { + class RefreshDownsampleIndexActionListener implements ActionListener { private final ActionListener actionListener; private final TaskId parentTask; @@ -868,7 +868,7 @@ class RefreshDownsampleIndexActionListener implements ActionListener criteriaValues; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index fcd0f6be8fbcb..f3bc07387512f 
100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -27,7 +27,7 @@ */ public class QueryRulesetListItem implements Writeable, ToXContentObject { - public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_500_061; + public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_10_X; public static final ParseField RULESET_ID_FIELD = new ParseField("ruleset_id"); public static final ParseField RULE_TOTAL_COUNT_FIELD = new ParseField("rule_total_count"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index b23ed92a5d9b8..3882b6c61bb2c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -73,7 +73,7 @@ public class RuleQueryBuilder extends AbstractQueryBuilder { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_061; + return TransportVersions.V_8_10_X; } public RuleQueryBuilder(QueryBuilder organicQuery, Map matchCriteria, String rulesetId) { diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java index c0a286cc5c464..414705aff0b79 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java +++ 
b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilter; @@ -258,7 +257,7 @@ protected TaskId cancelTaskWithXOpaqueId(String id, String action) { TaskId taskId = findTaskWithXOpaqueId(id, action); assertNotNull(taskId); logger.trace("Cancelling task " + taskId); - CancelTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); + ListTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); assertThat(response.getTasks(), hasSize(1)); assertThat(response.getTasks().get(0).action(), equalTo(action)); logger.trace("Task is cancelled " + taskId); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index 2d7a330560fcc..f9f9238b6c4ab 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -281,7 +281,7 @@ private Event(StreamInput in) throws IOException { } else { fetchFields = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { missing = in.readBoolean(); } else { missing = index.isEmpty(); @@ -304,7 +304,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(fetchFields, 
StreamOutput::writeWriteable); } } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_10_X)) { // for BWC, 8.9.1+ does not have "missing" attribute, but it considers events with an empty index "" as missing events // see https://github.com/elastic/elasticsearch/pull/98130 out.writeBoolean(missing); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index ecb8ce633d985..011b0d09fd8c5 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -182,7 +182,8 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, boolean i } public static List searchHits(SearchResponse response) { - return Arrays.asList(response.getHits().getHits()); + // TODO remove unpooled usage + return Arrays.asList(response.getHits().asUnpooled().getHits()); } /** diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java index edbeb3d0a0d8c..255e94d6bda34 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java @@ -289,7 +289,7 @@ private List mutateEvents(List original, TransportVersion version) e.id(), e.source(), version.onOrAfter(TransportVersions.V_7_13_0) ? e.fetchFields() : null, - version.onOrAfter(TransportVersions.V_8_500_061) ? e.missing() : e.index().isEmpty() + version.onOrAfter(TransportVersions.V_8_10_X) ? 
e.missing() : e.index().isEmpty() ) ); } @@ -299,10 +299,10 @@ private List mutateEvents(List original, TransportVersion version) public void testEmptyIndexAsMissingEvent() throws IOException { Event event = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null); BytesStreamOutput out = new BytesStreamOutput(); - out.setTransportVersion(TransportVersions.V_8_500_020);// 8.9.1 + out.setTransportVersion(TransportVersions.V_8_9_X);// 8.9.1 event.writeTo(out); ByteArrayStreamInput in = new ByteArrayStreamInput(out.bytes().array()); - in.setTransportVersion(TransportVersions.V_8_500_020); + in.setTransportVersion(TransportVersions.V_8_9_X); Event event2 = Event.readFrom(in); assertTrue(event2.missing()); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java index f391e9bdae84b..7bb6a228f6e48 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java @@ -74,14 +74,14 @@ public void query(QueryRequest r, ActionListener l) { } long sortValue = implicitTiebreakerValues.get(ordinal); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues( new Long[] { (long) ordinal, sortValue }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW } ) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); ActionListener.respondAndRelease( l, new SearchResponse(searchHits, null, null, 
false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) @@ -94,7 +94,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java index eb417570cb4a7..a8ed842e94c44 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java @@ -188,7 +188,7 @@ static class EventsAsHits { Map documentFields = new HashMap<>(); documentFields.put(KEY_FIELD_NAME, new DocumentField(KEY_FIELD_NAME, Collections.singletonList(value.v1()))); // save the timestamp both as docId (int) and as id (string) - SearchHit searchHit = new SearchHit(entry.getKey(), entry.getKey().toString()); + SearchHit searchHit = SearchHit.unpooled(entry.getKey(), entry.getKey().toString()); searchHit.addDocumentFields(documentFields, Map.of()); hits.add(searchHit); } @@ -215,7 +215,7 @@ public void query(QueryRequest r, ActionListener l) { Map> evs = ordinal != Integer.MAX_VALUE ? 
events.get(ordinal) : emptyMap(); EventsAsHits eah = new EventsAsHits(evs); - SearchHits searchHits = new SearchHits( + SearchHits searchHits = SearchHits.unpooled( eah.hits.toArray(SearchHits.EMPTY), new TotalHits(eah.hits.size(), Relation.EQUAL_TO), 0.0f @@ -232,7 +232,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java index 9141555fcd613..b880ec4b06926 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.composite.InternalComposite; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -171,8 +172,8 @@ public void fetchHits(Iterable> refs, ActionListener searchHits = new ArrayList<>(); - searchHits.add(new SearchHit(1, String.valueOf(1))); - searchHits.add(new SearchHit(2, String.valueOf(2))); + searchHits.add(SearchHit.unpooled(1, String.valueOf(1))); + searchHits.add(SearchHit.unpooled(2, String.valueOf(2))); return new Sample(new SequenceKey(randomAlphaOfLength(10)), searchHits); } @@ -224,7 +225,7 @@ void handleSearchRequest(ActionListener asSearchHitsList(Integer... 
docIds) { } List searchHits = new ArrayList<>(docIds.length); for (Integer docId : docIds) { - searchHits.add(new SearchHit(docId, docId.toString())); + searchHits.add(SearchHit.unpooled(docId, docId.toString())); } return searchHits; diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java index b995693458095..f62100a98b066 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/CriterionOrdinalExtractionTests.java @@ -153,7 +153,7 @@ private SearchHit searchHit(Object timeValue, Object tiebreakerValue, Supplier fields = new HashMap<>(); fields.put(tsField, new DocumentField(tsField, singletonList(timeValue))); fields.put(tbField, new DocumentField(tsField, singletonList(tiebreakerValue))); - SearchHit searchHit = new SearchHit(randomInt(), randomAlphaOfLength(10)); + SearchHit searchHit = SearchHit.unpooled(randomInt(), randomAlphaOfLength(10)); searchHit.addDocumentFields(fields, Map.of()); searchHit.sortValues(searchSortValues.get()); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java index 9c9bbfcdc5127..0bdb88592ce0f 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java @@ -135,7 +135,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + 
hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } @@ -236,12 +236,12 @@ protected void @SuppressWarnings("unchecked") void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { int ordinal = searchRequest.source().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 0.0f); SearchResponse response = new SearchResponse( searchHits, null, diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java index 7ef2b95d982fb..3097fbbc7f04a 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java @@ -107,11 +107,11 @@ static class TestQueryClient implements QueryClient { @Override public void query(QueryRequest r, ActionListener l) { int ordinal = r.searchSource().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits 
searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); ActionListener.respondAndRelease( l, new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) @@ -124,7 +124,7 @@ public void fetchHits(Iterable> refs, ActionListener ref : refs) { List hits = new ArrayList<>(ref.size()); for (HitReference hitRef : ref) { - hits.add(new SearchHit(-1, hitRef.id())); + hits.add(SearchHit.unpooled(-1, hitRef.id())); } searchHits.add(hits); } @@ -425,12 +425,12 @@ private class SuccessfulESMockClient extends ESMockClient { @Override void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { int ordinal = searchRequest.source().terminateAfter(); - SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + SearchHit searchHit = SearchHit.unpooled(ordinal, String.valueOf(ordinal)); searchHit.sortValues( new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); - SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); + SearchHits searchHits = SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); SearchResponse response = new SearchResponse( searchHits, null, @@ -477,11 +477,11 @@ void handleSearchRequest(ActionListener void handleSearchRequest(ActionListener GEO.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.CARTESIAN_POINT) { expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbToWkt((BytesRef) x)); + } else if (expectedType == Type.GEO_SHAPE) { + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> GEO.wkbToWkt((BytesRef) x)); + } else if (expectedType == Type.CARTESIAN_SHAPE) { + expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> CARTESIAN.wkbToWkt((BytesRef) x)); } else if (expectedType == Type.IP) { // convert 
BytesRef-packed IP to String, allowing subsequent comparison with what's expected expectedValue = rebuildExpected(expectedValue, BytesRef.class, x -> DocValueFormat.IP.format((BytesRef) x)); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index d49d5a964e944..4e0f0b8661631 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -61,6 +61,9 @@ public final class CsvTestUtils { private static final int MAX_WIDTH = 20; private static final CsvPreference CSV_SPEC_PREFERENCES = new CsvPreference.Builder('"', '|', "\r\n").build(); private static final String NULL_VALUE = "null"; + private static final char ESCAPE_CHAR = '\\'; + public static final String COMMA_ESCAPING_REGEX = "(?> loadPageFromCsv(URL source) throws Excep record CsvColumn(String name, Type type, BuilderWrapper builderWrapper) implements Releasable { void append(String stringValue) { - if (stringValue.contains(",")) {// multi-value field + if (stringValue.startsWith("\"") && stringValue.endsWith("\"")) { // string value + stringValue = stringValue.substring(1, stringValue.length() - 1).replace(ESCAPED_COMMA_SEQUENCE, ","); + } else if (stringValue.contains(",")) {// multi-value field builderWrapper().builder().beginPositionEntry(); String[] arrayOfValues = delimitedListToStringArray(stringValue, ","); @@ -229,6 +234,8 @@ public void close() { * Takes a csv String and converts it to a String array. Also, it recognizes an opening bracket "[" in one string and a closing "]" * in another string and it creates a single concatenated comma-separated String of all the values between the opening bracket entry * and the closing bracket entry. 
In other words, entries enclosed by "[]" are returned as a single element. + * + * Commas can be escaped with \ (backslash) character. */ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) { var mvCompressedEntries = new ArrayList(); @@ -237,14 +244,20 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) int pos = 0; // current position in the csv String int commaPos; // current "," character position + int previousCommaPos = 0; while ((commaPos = csvLine.indexOf(",", pos)) != -1 || pos <= csvLine.length()) { + if (commaPos > 0 && csvLine.charAt(commaPos - 1) == ESCAPE_CHAR) {// skip the escaped comma + pos = commaPos + 1;// moving on to the next character after comma + continue; + } + boolean isLastElement = commaPos == -1; - String entry = csvLine.substring(pos, isLastElement ? csvLine.length() : commaPos).trim(); + String entry = csvLine.substring(previousCommaPos, isLastElement ? csvLine.length() : commaPos).trim(); if (entry.startsWith("[")) { if (previousMvValue != null || (isLastElement && entry.endsWith("]") == false)) { String message = "Error line [{}:{}]: Unexpected start of a multi-value field value; current token [{}], " + (isLastElement ? 
"no closing point" : "previous token [{}]"); - throw new IllegalArgumentException(format(message, lineNumber, pos, entry, previousMvValue)); + throw new IllegalArgumentException(format(message, lineNumber, previousCommaPos, entry, previousMvValue)); } if (entry.endsWith("]")) { if (entry.length() > 2) {// single-valued multivalue field :shrug: @@ -263,7 +276,7 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) format( "Error line [{}:{}]: Unexpected end of a multi-value field value (no previous starting point); found [{}]", lineNumber, - pos, + previousCommaPos, entry ) ); @@ -279,8 +292,8 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) format( "Error line [{}:{}]: Unexpected missing value in a multi-value column; found [{}]", lineNumber, - pos, - csvLine.substring(pos - 1) + previousCommaPos, + csvLine.substring(previousCommaPos - 1) ) ); } @@ -290,12 +303,22 @@ static String[] multiValuesAwareCsvToStringArray(String csvLine, int lineNumber) } } pos = 1 + (isLastElement ? csvLine.length() : commaPos);// break out of the loop if it reached its last element + previousCommaPos = pos; } return mvCompressedEntries.toArray(String[]::new); } public record ExpectedResults(List columnNames, List columnTypes, List> values) {} + /** + * The method loads a section of a .csv-spec file representing the results of executing the query of that section. + * It reads both the schema (field names and their types) and the row values. + * Values starting with an opening square bracket and ending with a closing square bracket are considered multi-values. Inside + * these multi-values, commas separate the individual values and escaped commas are allowed with a prefixed \ + * default \ (backslash) character. 
+ * @param csv a string representing the header and row values of a single query execution result + * @return data structure with column names, their types and values + */ public static ExpectedResults loadCsvSpecValues(String csv) { List columnNames; List columnTypes; @@ -338,13 +361,21 @@ public static ExpectedResults loadCsvSpecValues(String csv) { if (value.startsWith("[") ^ value.endsWith("]")) { throw new IllegalArgumentException("Incomplete multi-value (opening and closing square brackets) found " + value); } - if (value.contains(",") && value.startsWith("[")) {// commas outside a multi-value should be ok - List listOfMvValues = new ArrayList<>(); - for (String mvValue : delimitedListToStringArray(value.substring(1, value.length() - 1), ",")) { - listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim())); + if (value.contains(",") && value.startsWith("[")) { + // split on commas but ignoring escaped commas + String[] multiValues = value.substring(1, value.length() - 1).split(COMMA_ESCAPING_REGEX); + if (multiValues.length > 0) { + List listOfMvValues = new ArrayList<>(); + for (String mvValue : multiValues) { + listOfMvValues.add(columnTypes.get(i).convert(mvValue.trim().replace(ESCAPED_COMMA_SEQUENCE, ","))); + } + rowValues.add(listOfMvValues); + } else { + rowValues.add(columnTypes.get(i).convert(value.replace(ESCAPED_COMMA_SEQUENCE, ","))); } - rowValues.add(listOfMvValues); } else { + // The value considered here is the one where any potential escaped comma is kept as is (with the escape char) + // TODO if we'd want escaped commas outside multi-values fields, we'd have to adjust this value here as well rowValues.add(columnTypes.get(i).convert(value)); } } @@ -392,7 +423,9 @@ public enum Type { ), BOOLEAN(Booleans::parseBoolean, Boolean.class), GEO_POINT(x -> x == null ? null : GEO.wktToWkb(x), BytesRef.class), - CARTESIAN_POINT(x -> x == null ? null : CARTESIAN.wktToWkb(x), BytesRef.class); + CARTESIAN_POINT(x -> x == null ? 
null : CARTESIAN.wktToWkb(x), BytesRef.class), + GEO_SHAPE(x -> x == null ? null : GEO.wktToWkb(x), BytesRef.class), + CARTESIAN_SHAPE(x -> x == null ? null : CARTESIAN.wktToWkb(x), BytesRef.class); private static final Map LOOKUP = new HashMap<>(); @@ -457,7 +490,7 @@ public static Type asType(ElementType elementType, Type actualType) { } private static Type bytesRefBlockType(Type actualType) { - if (actualType == GEO_POINT || actualType == CARTESIAN_POINT) { + if (actualType == GEO_POINT || actualType == CARTESIAN_POINT || actualType == GEO_SHAPE || actualType == CARTESIAN_SHAPE) { return actualType; } else { return KEYWORD; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java index 3df70b3b83d37..1e26a3df45419 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestsDataLoader.java @@ -42,8 +42,9 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.common.Strings.delimitedListToStringArray; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.CsvTestUtils.COMMA_ESCAPING_REGEX; +import static org.elasticsearch.xpack.esql.CsvTestUtils.ESCAPED_COMMA_SEQUENCE; import static org.elasticsearch.xpack.esql.CsvTestUtils.multiValuesAwareCsvToStringArray; public class CsvTestsDataLoader { @@ -56,26 +57,29 @@ public class CsvTestsDataLoader { private static final TestsDataset CLIENT_IPS = new TestsDataset("clientips", "mapping-clientips.json", "clientips.csv"); private static final TestsDataset AIRPORTS = new TestsDataset("airports", "mapping-airports.json", "airports.csv"); private static final TestsDataset AIRPORTS_WEB = new TestsDataset("airports_web", 
"mapping-airports_web.json", "airports_web.csv"); + private static final TestsDataset COUNTRIES_BBOX = new TestsDataset( + "countries_bbox", + "mapping-countries_bbox.json", + "countries_bbox.csv" + ); + private static final TestsDataset COUNTRIES_BBOX_WEB = new TestsDataset( + "countries_bbox_web", + "mapping-countries_bbox_web.json", + "countries_bbox_web.csv" + ); - public static final Map CSV_DATASET_MAP = Map.of( - EMPLOYEES.indexName, - EMPLOYEES, - HOSTS.indexName, - HOSTS, - APPS.indexName, - APPS, - LANGUAGES.indexName, - LANGUAGES, - UL_LOGS.indexName, - UL_LOGS, - SAMPLE_DATA.indexName, - SAMPLE_DATA, - CLIENT_IPS.indexName, - CLIENT_IPS, - AIRPORTS.indexName, - AIRPORTS, - AIRPORTS_WEB.indexName, - AIRPORTS_WEB + public static final Map CSV_DATASET_MAP = Map.ofEntries( + Map.entry(EMPLOYEES.indexName, EMPLOYEES), + Map.entry(HOSTS.indexName, HOSTS), + Map.entry(APPS.indexName, APPS), + Map.entry(LANGUAGES.indexName, LANGUAGES), + Map.entry(UL_LOGS.indexName, UL_LOGS), + Map.entry(SAMPLE_DATA.indexName, SAMPLE_DATA), + Map.entry(CLIENT_IPS.indexName, CLIENT_IPS), + Map.entry(AIRPORTS.indexName, AIRPORTS), + Map.entry(AIRPORTS_WEB.indexName, AIRPORTS_WEB), + Map.entry(COUNTRIES_BBOX.indexName, COUNTRIES_BBOX), + Map.entry(COUNTRIES_BBOX_WEB.indexName, COUNTRIES_BBOX_WEB) ); private static final EnrichConfig LANGUAGES_ENRICH = new EnrichConfig("languages_policy", "enrich-policy-languages.json"); @@ -137,17 +141,33 @@ public static void main(String[] args) throws IOException { } try (RestClient client = builder.build()) { - loadDataSetIntoEs(client); + loadDataSetIntoEs(client, (restClient, indexName, indexMapping) -> { + Request request = new Request("PUT", "/" + indexName); + request.setJsonEntity("{\"mappings\":" + indexMapping + "}"); + restClient.performRequest(request); + }); } } + private static void loadDataSetIntoEs(RestClient client, IndexCreator indexCreator) throws IOException { + loadDataSetIntoEs(client, 
LogManager.getLogger(CsvTestsDataLoader.class), indexCreator); + } + public static void loadDataSetIntoEs(RestClient client) throws IOException { - loadDataSetIntoEs(client, LogManager.getLogger(CsvTestsDataLoader.class)); + loadDataSetIntoEs(client, (restClient, indexName, indexMapping) -> { + ESRestTestCase.createIndex(restClient, indexName, null, indexMapping, null); + }); } public static void loadDataSetIntoEs(RestClient client, Logger logger) throws IOException { + loadDataSetIntoEs(client, logger, (restClient, indexName, indexMapping) -> { + ESRestTestCase.createIndex(restClient, indexName, null, indexMapping, null); + }); + } + + private static void loadDataSetIntoEs(RestClient client, Logger logger, IndexCreator indexCreator) throws IOException { for (var dataSet : CSV_DATASET_MAP.values()) { - load(client, dataSet.indexName, "/" + dataSet.mappingFileName, "/" + dataSet.dataFileName, logger); + load(client, dataSet.indexName, "/" + dataSet.mappingFileName, "/" + dataSet.dataFileName, logger, indexCreator); } forceMerge(client, CSV_DATASET_MAP.keySet(), logger); for (var policy : ENRICH_POLICIES) { @@ -169,7 +189,14 @@ private static void loadEnrichPolicy(RestClient client, String policyName, Strin client.performRequest(request); } - private static void load(RestClient client, String indexName, String mappingName, String dataName, Logger logger) throws IOException { + private static void load( + RestClient client, + String indexName, + String mappingName, + String dataName, + Logger logger, + IndexCreator indexCreator + ) throws IOException { URL mapping = CsvTestsDataLoader.class.getResource(mappingName); if (mapping == null) { throw new IllegalArgumentException("Cannot find resource " + mappingName); @@ -178,14 +205,10 @@ private static void load(RestClient client, String indexName, String mappingName if (data == null) { throw new IllegalArgumentException("Cannot find resource " + dataName); } - createTestIndex(client, indexName, readTextFile(mapping)); + 
indexCreator.createIndex(client, indexName, readTextFile(mapping)); loadCsvData(client, indexName, data, CsvTestsDataLoader::createParser, logger); } - private static void createTestIndex(RestClient client, String indexName, String mapping) throws IOException { - ESRestTestCase.createIndex(client, indexName, null, mapping, null); - } - public static String readTextFile(URL resource) throws IOException { try (BufferedReader reader = TestUtils.reader(resource)) { StringBuilder b = new StringBuilder(); @@ -198,6 +221,20 @@ public static String readTextFile(URL resource) throws IOException { } @SuppressWarnings("unchecked") + /** + * Loads a classic csv file in an ES cluster using a RestClient. + * The structure of the file is as follows: + * - commented lines should start with "//" + * - the first non-comment line from the file is the schema line (comma separated field_name:ES_data_type elements) + * - sub-fields should be placed after the root field using a dot notation for the name: + * root_field:long,root_field.sub_field:integer + * - a special _id field can be used in the schema and the values of this field will be used in the bulk request as actual doc ids + * - all subsequent non-comment lines represent the values that will be used to build the _bulk request + * - an empty string "" refers to a null value + * - a value starting with an opening square bracket "[" and ending with a closing square bracket "]" refers to a multi-value field + * - multi-values are comma separated + * - commas inside multivalue fields can be escaped with \ (backslash) character + */ private static void loadCsvData( RestClient client, String indexName, @@ -278,17 +315,27 @@ private static void loadCsvData( if (i > 0 && row.length() > 0) { row.append(","); } - if (entries[i].contains(",")) {// multi-value + // split on comma ignoring escaped commas + String[] multiValues = entries[i].split(COMMA_ESCAPING_REGEX); + if (multiValues.length > 0) {// multi-value StringBuilder rowStringValue 
= new StringBuilder("["); - for (String s : delimitedListToStringArray(entries[i], ",")) { - rowStringValue.append("\"" + s + "\","); + for (String s : multiValues) { + if (entries[i].startsWith("\"") == false || entries[i].endsWith("\"") == false) { + rowStringValue.append("\"" + s + "\","); + } else { + rowStringValue.append(s + ","); + } } // remove the last comma and put a closing bracket instead rowStringValue.replace(rowStringValue.length() - 1, rowStringValue.length(), "]"); entries[i] = rowStringValue.toString(); } else { - entries[i] = "\"" + entries[i] + "\""; + if (entries[i].startsWith("\"") == false || entries[i].endsWith("\"") == false) { + entries[i] = "\"" + entries[i] + "\""; + } } + // replace any escaped commas with single comma + entries[i] = entries[i].replace(ESCAPED_COMMA_SEQUENCE, ","); row.append("\"" + columns[i] + "\":" + entries[i]); } catch (Exception e) { throw new IllegalArgumentException( @@ -356,4 +403,8 @@ private static XContentParser createParser(XContent xContent, InputStream data) public record TestsDataset(String indexName, String mappingFileName, String dataFileName) {} public record EnrichConfig(String policyName, String policyFileName) {} + + private interface IndexCreator { + void createIndex(RestClient client, String indexName, String mapping) throws IOException; + } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 8edcdd9edb124..9c8d5f420d53b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -145,7 +145,7 @@ public static Map loadMapping(String name) { } public static EnrichResolution emptyPolicyResolution() { - return new EnrichResolution(Set.of(), Set.of()); + return new EnrichResolution(); 
} public static SearchStats statsForMissingField(String... names) { diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv new file mode 100644 index 0000000000000..f8701f386e73b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox.csv @@ -0,0 +1,249 @@ +id:keyword,name:keyword,shape:geo_shape +FLK,Falkland Is.,"BBOX(-61.148055\, -57.733200\, -51.249455\, -52.343055)" +GUF,French Guiana,"BBOX(-54.603782\, -51.648055\, 5.755418\, 2.113473)" +GUY,Guyana,"BBOX(-61.389727\, -56.470636\, 8.535273\, 1.186873)" +PCN,Pitcairn Is.,"BBOX(-130.105055\, -128.286118\, -24.325836\, -25.082227)" +SGS,South Georgia & the South Sandwich Is.,"BBOX(-38.023755\, -26.241391\, -53.989727\, -58.498609)" +SHN,St. Helena,"BBOX(-5.792782\, -5.645282\, -15.903755\, -16.021946)" +SUR,Suriname,"BBOX(-58.071400\, -53.986118\, 6.001809\, 1.836245)" +TTO,Trinidad & Tobago,"BBOX(-61.921600\, -60.520836\, 11.345554\, 10.040345)" +VEN,Venezuela,"BBOX(-73.378064\, -59.803055\, 12.197500\, 0.649164)" +ASM,American Samoa,"BBOX(-170.823227\, -170.561873\, -14.254309\, -14.375555)" +COK,Cook Is.,"BBOX(-165.848345\, -157.703764\, -10.881318\, -21.940836)" +PYF,French Polynesia,"BBOX(-151.497773\, -138.809755\, -8.778191\, -17.870836)" +UMI,Jarvis I.,"BBOX(-160.045164\, -160.009464\, -0.374309\, -0.398055)" +NIU,Niue,"BBOX(-169.952236\, -169.781555\, -18.963336\, -19.145555)" +WSM,Samoa,"BBOX(-172.780027\, -171.429200\, -13.460555\, -14.057500)" +TKL,Tokelau,"BBOX(-171.862718\, -171.843764\, -9.170627\, -9.218891)" +TON,Tonga,"BBOX(-175.360000\, -173.906827\, -18.568055\, -21.268064)" +WLF,Wallis & Futuna,"BBOX(-178.190273\, -176.121936\, -13.214864\, -14.323891)" +ARG,Argentina,"BBOX(-73.582300\, -53.650009\, -21.780518\, -55.051673)" +BOL,Bolivia,"BBOX(-69.656191\, -57.521118\, -9.679191\, -22.901109)" +BRA,Brazil,"BBOX(-74.004591\, -34.792918\, 5.272709\, 
-33.741118)" +CHL,Chile,"BBOX(-109.446109\, -66.420627\, -17.505282\, -55.902227)" +ECU,Ecuador,"BBOX(-91.663891\, -75.216846\, 1.437782\, -5.000309)" +PRY,Paraguay,"BBOX(-62.643773\, -54.243900\, -19.296809\, -27.584727)" +PER,Peru,"BBOX(-81.355146\, -68.673909\, -0.036873\, -18.348546)" +URY,Uruguay,"BBOX(-58.438609\, -53.098300\, -30.096673\, -34.943818)" +UMI,Baker I.,"BBOX(-176.467655\, -176.455855\, 0.222573\, 0.215282)" +CAN,Canada,"BBOX(-141.002991\, -52.617364\, 83.113873\, 41.675554)" +GTM,Guatemala,"BBOX(-92.246782\, -88.214736\, 17.821109\, 13.745836)" +UMI,Howland I.,"BBOX(-176.643082\, -176.631091\, 0.808609\, 0.790282)" +UMI,Johnston Atoll,"BBOX(-169.538936\, -169.523927\, 16.730273\, 16.724164)" +MEX,Mexico,"BBOX(-118.404164\, -86.738618\, 32.718454\, 14.550545)" +UMI,Midway Is.,"BBOX(-177.395845\, -177.360545\, 28.221518\, 28.184154)" +BRB,Barbados,"BBOX(-59.659446\, -59.427082\, 13.337082\, 13.050554)" +DMA,Dominica,"BBOX(-61.491391\, -61.250700\, 15.631945\, 15.198054)" +GRD,Grenada,"BBOX(-61.785182\, -61.596391\, 12.237154\, 11.996945)" +GLP,Guadeloupe,"BBOX(-61.796109\, -61.187082\, 16.512918\, 15.870000)" +MTQ,Martinique,"BBOX(-61.231536\, -60.816946\, 14.880136\, 14.402773)" +LCA,St. Lucia,"BBOX(-61.079582\, -60.878064\, 14.109309\, 13.709445)" +SPM,St. Pierre & Miquelon,"BBOX(-56.397782\, -56.145500\, 47.135827\, 46.747191)" +VCT,St. 
Vincent & the Grenadines,"BBOX(-61.280146\, -61.120282\, 13.383191\, 13.130282)" +ABW,Aruba,"BBOX(-70.059664\, -69.874864\, 12.627773\, 12.411109)" +BMU,Bermuda,"BBOX(-64.823064\, -64.676809\, 32.379509\, 32.260554)" +DOM,Dominican Republic,"BBOX(-72.003064\, -68.322927\, 19.930827\, 17.604164)" +HTI,Haiti,"BBOX(-74.467791\, -71.629182\, 20.091454\, 18.022782)" +JAM,Jamaica,"BBOX(-78.373900\, -76.221118\, 18.522500\, 17.697218)" +ANT,Netherlands Antilles,"BBOX(-69.163618\, -68.192927\, 12.383891\, 12.020554)" +BHS,The Bahamas,"BBOX(-78.978900\, -72.738891\, 26.929164\, 20.915273)" +TCA,Turks & Caicos Is.,"BBOX(-72.031464\, -71.127573\, 21.957773\, 21.429918)" +BLZ,Belize,"BBOX(-89.216400\, -87.779591\, 18.489900\, 15.889854)" +CYM,Cayman Is.,"BBOX(-81.400836\, -81.093064\, 19.354164\, 19.265000)" +COL,Colombia,"BBOX(-81.720146\, -66.870455\, 12.590273\, -4.236873)" +CRI,Costa Rica,"BBOX(-85.911391\, -82.561400\, 11.212845\, 8.025673)" +CUB,Cuba,"BBOX(-84.952927\, -74.131255\, 23.194027\, 19.821945)" +SLV,El Salvador,"BBOX(-90.108064\, -87.694673\, 14.431982\, 13.156391)" +HND,Honduras,"BBOX(-89.350491\, -83.131855\, 16.435827\, 12.985173)" +NIC,Nicaragua,"BBOX(-87.689827\, -83.131855\, 15.022218\, 10.709691)" +PAN,Panama,"BBOX(-83.030291\, -77.198336\, 9.620136\, 7.206109)" +AIA,Anguilla,"BBOX(-63.167782\, -62.972709\, 18.272982\, 18.164445)" +ATG,Antigua & Barbuda,"BBOX(-61.891109\, -61.666946\, 17.724300\, 16.989718)" +VGB,British Virgin Is.,"BBOX(-64.698482\, -64.324527\, 18.504854\, 18.383891)" +MSR,Montserrat,"BBOX(-62.236946\, -62.138891\, 16.812354\, 16.671391)" +PRI,Puerto Rico,"BBOX(-67.266400\, -65.301118\, 18.519445\, 17.922218)" +KNA,St. 
Kitts & Nevis,"BBOX(-62.862782\, -62.622509\, 17.410136\, 17.208882)" +VIR,Virgin Is.,"BBOX(-65.023509\, -64.562573\, 18.387673\, 17.676664)" +FRO,Faroe Is.,"BBOX(-7.433473\, -6.389718\, 62.357500\, 61.388327)" +GRL,Greenland,"BBOX(-73.053609\, -12.157637\, 83.623600\, 59.790273)" +XGK,Guernsey,"BBOX(-2.668609\, -2.500973\, 49.508191\, 49.422491)" +ISL,Iceland,"BBOX(-24.538400\, -13.499446\, 66.536100\, 63.390000)" +IRL,Ireland,"BBOX(-10.474727\, -6.013055\, 55.379991\, 51.445545)" +XIM,Isle of Man,"BBOX(-4.787155\, -4.308682\, 54.416382\, 54.055545)" +SJM,Jan Mayen,"BBOX(-9.119909\, -7.928509\, 71.180818\, 70.803863)" +XJE,Jersey,"BBOX(-2.247364\, -2.015000\, 49.261109\, 49.167773)" +GBR,United Kingdom,"BBOX(-8.171664\, 1.749445\, 60.843327\, 49.955273)" +CPV,Cape Verde,"BBOX(-25.360555\, -22.666109\, 17.192364\, 14.811109)" +CIV,Cote d'Ivoire,"BBOX(-8.606382\, -2.487782\, 10.735254\, 4.344718)" +GHA,Ghana,"BBOX(-3.248891\, 1.202782\, 11.155691\, 4.727082)" +GIB,Gibraltar,"BBOX(-5.356173\, -5.334509\, 36.163309\, 36.112073)" +LBR,Liberia,"BBOX(-11.492327\, -7.368400\, 8.512782\, 4.343609)" +MAR,Morocco,"BBOX(-13.174964\, -1.011809\, 35.919164\, 27.664236)" +PRT,Portugal,"BBOX(-31.289027\, -6.190455\, 42.150673\, 32.637500)" +ESP,Spain,"BBOX(-18.169864\, 4.316945\, 43.764300\, 27.637500)" +ESH,Western Sahara,"BBOX(-17.101527\, -8.666391\, 27.666954\, 20.764100)" +BFA,Burkina Faso,"BBOX(-5.520837\, 2.397927\, 15.082773\, 9.395691)" +GIN,Guinea,"BBOX(-15.080837\, -7.653373\, 12.677500\, 7.193927)" +GNB,Guinea-Bissau,"BBOX(-16.717773\, -13.643891\, 12.684718\, 10.925100)" +MLI,Mali,"BBOX(-12.244837\, 4.251391\, 25.000273\, 10.142154)" +MRT,Mauritania,"BBOX(-17.075555\, -4.806109\, 27.290454\, 14.725636)" +SEN,Senegal,"BBOX(-17.532782\, -11.369927\, 16.690618\, 12.301745)" +SLE,Sierra Leone,"BBOX(-13.295609\, -10.264309\, 9.997500\, 6.923609)" +GMB,The Gambia,"BBOX(-16.821664\, -13.798609\, 13.826391\, 13.059973)" +DJI,Djibouti,"BBOX(41.759854\, 43.420409\, 12.708327\, 
10.942218)" +ERI,Eritrea,"BBOX(36.443282\, 43.121382\, 17.994882\, 12.363891)" +ETH,Ethiopia,"BBOX(32.991800\, 47.988245\, 14.883609\, 3.406664)" +MNG,Mongolia,"BBOX(87.761100\, 119.931509\, 52.142773\, 41.586654)" +SDN,Sudan,"BBOX(21.829100\, 38.607500\, 22.232218\, 3.493391)" +UGA,Uganda,"BBOX(29.574300\, 35.009718\, 4.222782\, -1.476109)" +ISR,Gaza Strip,"BBOX(34.216663\, 34.558891\, 31.596100\, 31.216545)" +IRQ,Iraq,"BBOX(38.794700\, 48.560691\, 37.383673\, 29.061664)" +ISR,Israel,"BBOX(34.267582\, 35.681109\, 33.270273\, 29.486709)" +JOR,Jordan,"BBOX(34.960418\, 39.301109\, 33.377591\, 29.188891)" +KAZ,Kazakhstan,"BBOX(46.499163\, 87.348209\, 55.442627\, 40.594436)" +NOR,Norway,"BBOX(4.789582\, 31.073536\, 71.154709\, 57.987918)" +RUS,Russia,"BBOX(-180.000000\, 180.000000\, 81.851927\, 41.196582)" +SWE,Sweden,"BBOX(11.113336\, 24.167009\, 69.060300\, 55.339164)" +ISR,West Bank,"BBOX(34.888191\, 35.570609\, 32.546391\, 31.350691)" +DZA,Algeria,"BBOX(-8.667218\, 11.986473\, 37.089854\, 18.976391)" +AND,Andorra,"BBOX(1.421391\, 1.781718\, 42.655964\, 42.436382)" +CMR,Cameroon,"BBOX(8.502363\, 16.207000\, 13.085000\, 1.654164)" +CAF,Central African Republic,"BBOX(14.418891\, 27.459718\, 11.000836\, 2.221264)" +LBY,Libya,"BBOX(9.311391\, 25.151663\, 33.171136\, 19.499064)" +MCO,Monaco,"BBOX(7.390900\, 7.439291\, 43.768300\, 43.727545)" +TUN,Tunisia,"BBOX(7.492218\, 11.581663\, 37.340409\, 30.234391)" +BEN,Benin,"BBOX(0.776663\, 3.855000\, 12.396654\, 6.218718)" +TCD,Chad,"BBOX(13.461945\, 24.002745\, 23.450554\, 7.458536)" +GNQ,Equatorial Guinea,"BBOX(8.424163\, 11.353891\, 3.763336\, 0.930154)" +KIR,Kiribati,"BBOX(-157.581700\, 172.947509\, 2.033054\, 1.335991)" +NER,Niger,"BBOX(0.166663\, 15.996663\, 23.522309\, 11.693273)" +NGA,Nigeria,"BBOX(2.692500\, 14.649654\, 13.891500\, 4.272845)" +STP,Sao Tome & Principe,"BBOX(6.465136\, 7.463473\, 1.701245\, 0.018336)" +TGO,Togo,"BBOX(-0.149764\, 1.797800\, 11.138536\, 6.100545)" +ALB,Albania,"BBOX(19.288536\, 
21.053327\, 42.660345\, 39.645000)" +BIH,Bosnia & Herzegovina,"BBOX(15.740591\, 19.619782\, 45.265945\, 42.565827)" +HRV,Croatia,"BBOX(13.504791\, 19.425000\, 46.535827\, 42.399991)" +ITA,Italy,"BBOX(6.623963\, 18.514445\, 47.094582\, 36.649164)" +MKD,Macedonia,"BBOX(20.458818\, 23.030973\, 42.358954\, 40.855891)" +MLT,Malta,"BBOX(14.329100\, 14.570000\, 35.991936\, 35.800000)" +SMR,San Marino,"BBOX(12.406945\, 12.511109\, 43.986873\, 43.898682)" +SMN,Serbia & Montenegro,"BBOX(18.453327\, 23.005000\, 46.181109\, 41.849000)" +VTC,Vatican City,"BBOX(12.444473\, 12.457718\, 41.908391\, 41.900891)" +BGR,Bulgaria,"BBOX(22.365273\, 28.605136\, 44.224718\, 41.243045)" +CYP,Cyprus,"BBOX(32.269863\, 34.586036\, 35.688609\, 34.640273)" +EGY,Egypt,"BBOX(24.706800\, 36.895827\, 31.646945\, 21.994164)" +GEO,Georgia,"BBOX(40.002963\, 46.710818\, 43.584718\, 41.048045)" +GRC,Greece,"BBOX(19.640000\, 28.238045\, 41.747773\, 34.930545)" +LBN,Lebanon,"BBOX(35.100827\, 36.623745\, 34.647500\, 33.062082)" +SYR,Syria,"BBOX(35.614463\, 42.378327\, 37.290545\, 32.313609)" +TUR,Turkey,"BBOX(25.665827\, 44.820545\, 42.109991\, 35.818445)" +AUT,Austria,"BBOX(9.533573\, 17.166382\, 49.018745\, 46.407491)" +CZE,Czech Republic,"BBOX(12.093700\, 18.852218\, 51.052491\, 48.581382)" +DNK,Denmark,"BBOX(8.092918\, 15.149163\, 57.745973\, 54.561936)" +HUN,Hungary,"BBOX(16.111800\, 22.894800\, 48.576173\, 45.748327)" +POL,Poland,"BBOX(14.147636\, 24.143473\, 54.836036\, 49.002918)" +SVK,Slovakia,"BBOX(16.844718\, 22.558054\, 49.600827\, 47.737500)" +SVN,Slovenia,"BBOX(13.383473\, 16.607873\, 46.876245\, 45.425818)" +SJM,Svalbard,"BBOX(10.487918\, 33.637500\, 80.764163\, 74.343045)" +BEL,Belgium,"BBOX(2.541663\, 6.398200\, 51.501245\, 49.508882)" +FRA,France,"BBOX(-4.790282\, 9.562218\, 51.091109\, 41.364927)" +DEU,Germany,"BBOX(5.865000\, 15.033818\, 55.056527\, 47.274718)" +LIE,Liechtenstein,"BBOX(9.474636\, 9.633891\, 47.274545\, 47.057454)" +LUX,Luxembourg,"BBOX(5.734445\, 6.524027\, 50.181809\, 
49.448464)" +NLD,Netherlands,"BBOX(3.370863\, 7.210973\, 53.465827\, 50.753882)" +CHE,Switzerland,"BBOX(5.967009\, 10.488209\, 47.806664\, 45.829436)" +USA,United States,"BBOX(-178.216555\, 179.775936\, 71.351436\, 18.925482)" +BLR,Belarus,"BBOX(23.165400\, 32.740054\, 56.167491\, 51.251845)" +EST,Estonia,"BBOX(21.837354\, 28.194091\, 59.664718\, 57.522636)" +FIN,Finland,"BBOX(19.511391\, 31.581963\, 70.088609\, 59.806800)" +LVA,Latvia,"BBOX(20.968609\, 28.235963\, 58.083254\, 55.674836)" +LTU,Lithuania,"BBOX(20.942836\, 26.813054\, 56.449854\, 53.890336)" +MDA,Moldova,"BBOX(26.634991\, 30.128709\, 48.468318\, 45.448645)" +ROM,Romania,"BBOX(20.261027\, 29.672218\, 48.263882\, 43.623309)" +UKR,Ukraine,"BBOX(22.151445\, 40.178745\, 52.378600\, 44.379154)" +IND,India,"BBOX(68.144227\, 97.380536\, 35.505618\, 6.745827)" +MDV,Maldives,"BBOX(72.863391\, 73.637272\, 7.027773\, -0.641664)" +OMN,Oman,"BBOX(51.999291\, 59.847082\, 26.368709\, 16.642782)" +SOM,Somalia,"BBOX(40.988609\, 51.411318\, 11.979164\, -1.674873)" +LKA,Sri Lanka,"BBOX(79.696091\, 81.891663\, 9.828191\, 5.918054)" +TKM,Turkmenistan,"BBOX(51.250182\, 66.670882\, 42.796173\, 35.145991)" +UZB,Uzbekistan,"BBOX(55.997491\, 73.167545\, 45.570591\, 37.184991)" +YEM,Yemen,"BBOX(42.555973\, 54.473473\, 18.999345\, 12.144718)" +ARM,Armenia,"BBOX(43.454163\, 46.620536\, 41.297054\, 38.841145)" +AZE,Azerbaijan,"BBOX(44.778863\, 51.677009\, 42.710754\, 38.262809)" +BHR,Bahrain,"BBOX(50.453327\, 50.796391\, 26.288891\, 25.571945)" +IRN,Iran,"BBOX(44.034954\, 63.330273\, 39.779154\, 25.075973)" +KWT,Kuwait,"BBOX(46.546945\, 48.416591\, 30.084164\, 28.538882)" +QAT,Qatar,"BBOX(50.751936\, 51.615827\, 26.152500\, 24.556045)" +SAU,Saudi Arabia,"BBOX(34.572145\, 55.666109\, 32.154945\, 16.377500)" +ARE,United Arab Emirates,"BBOX(51.583327\, 56.381663\, 26.083882\, 22.633327)" +AFG,Afghanistan,"BBOX(60.504163\, 74.915736\, 38.471982\, 29.406109)" +KGZ,Kyrgyzstan,"BBOX(69.249500\, 80.281582\, 43.216900\, 39.195473)" 
+NPL,Nepal,"BBOX(80.052200\, 88.194554\, 30.424718\, 26.368364)" +PAK,Pakistan,"BBOX(60.866300\, 77.823927\, 37.060791\, 23.688045)" +TJK,Tajikistan,"BBOX(67.364700\, 75.187482\, 41.049254\, 36.671845)" +BGD,Bangladesh,"BBOX(88.043872\, 92.669345\, 26.626136\, 20.744818)" +BTN,Bhutan,"BBOX(88.751936\, 92.114218\, 28.325000\, 26.703609)" +BRN,Brunei,"BBOX(114.095082\, 115.360263\, 5.053054\, 4.018191)" +CHN,China,"BBOX(73.620045\, 134.768463\, 53.553745\, 18.168882)" +JPN,Japan,"BBOX(123.678863\, 145.812409\, 45.486382\, 24.251391)" +PRK,North Korea,"BBOX(124.323954\, 130.697418\, 43.006100\, 37.671382)" +PLW,Palau,"BBOX(134.452482\, 134.658872\, 7.729445\, 7.305254)" +PHL,Philippines,"BBOX(116.950000\, 126.598036\, 19.391109\, 5.049164)" +KOR,South Korea,"BBOX(126.099018\, 129.586872\, 38.625245\, 33.192209)" +KHM,Cambodia,"BBOX(102.346509\, 107.636382\, 14.708618\, 10.422736)" +LAO,Laos,"BBOX(100.091372\, 107.695254\, 22.499927\, 13.926664)" +MYS,Malaysia,"BBOX(99.641936\, 119.275818\, 7.352918\, 0.852782)" +MMR,Myanmar,"BBOX(92.204991\, 101.169427\, 28.546527\, 9.839582)" +SGP,Singapore,"BBOX(103.640945\, 103.997945\, 1.445282\, 1.259027)" +THA,Thailand,"BBOX(97.347272\, 105.639291\, 20.454582\, 5.633473)" +VNM,Vietnam,"BBOX(102.140745\, 109.464845\, 23.324164\, 8.559236)" +GUM,Guam,"BBOX(144.634154\, 144.953309\, 13.652291\, 13.235000)" +MHL,Marshall Is.,"BBOX(162.324963\, 171.378063\, 14.594027\, 5.600273)" +FSM,Micronesia,"BBOX(158.120100\, 163.042891\, 6.977636\, 5.261664)" +MNP,Northern Mariana Is.,"BBOX(145.572682\, 145.818082\, 15.268191\, 14.908054)" +UMI,Wake I.,"BBOX(166.608981\, 166.662200\, 19.324582\, 19.279445)" +BWA,Botswana,"BBOX(19.996109\, 29.373618\, -17.782082\, -26.875555)" +BDI,Burundi,"BBOX(28.985000\, 30.853191\, -2.301564\, -4.448055)" +ATF,French Southern & Antarctic Lands,"BBOX(51.650836\, 70.567491\, -46.327645\, -49.725009)" +HMD,Heard I. 
& McDonald Is.,"BBOX(73.234709\, 73.773882\, -52.965145\, -53.199445)" +KEN,Kenya,"BBOX(33.907218\, 41.905163\, 4.622500\, -4.669618)" +RWA,Rwanda,"BBOX(28.854445\, 30.893263\, -1.054446\, -2.825491)" +TZA,Tanzania,"BBOX(29.340827\, 40.436809\, -0.997218\, -11.740418)" +ZMB,Zambia,"BBOX(21.996391\, 33.702282\, -8.191664\, -18.074918)" +ZWE,Zimbabwe,"BBOX(25.237918\, 33.071591\, -15.616527\, -22.414764)" +ATA,Antarctica,"BBOX(-180.000000\, 180.000000\, -60.503336\, -90.000000)" +NOR,Bouvet I.,"BBOX(3.342363\, 3.484163\, -54.383609\, -54.462782)" +COM,Comoros,"BBOX(43.214027\, 44.530418\, -11.366946\, -12.383055)" +REU,Juan De Nova I.,"BBOX(42.723818\, 42.760900\, -17.052018\, -17.076118)" +LSO,Lesotho,"BBOX(27.013973\, 29.455554\, -28.570691\, -30.650527)" +MWI,Malawi,"BBOX(32.681873\, 35.920963\, -9.376673\, -17.135282)" +MOZ,Mozambique,"BBOX(30.213018\, 40.846109\, -10.471109\, -26.860282)" +ZAF,South Africa,"BBOX(16.483327\, 37.892218\, -22.136391\, -46.969727)" +SWZ,Swaziland,"BBOX(30.798336\, 32.133400\, -25.728336\, -27.316391)" +AGO,Angola,"BBOX(11.731245\, 24.084445\, -4.388991\, -18.016391)" +COG,Congo,"BBOX(11.140663\, 18.643609\, 3.711109\, -5.015000)" +ZAR,Congo\, DRC,"BBOX(12.214554\, 31.302773\, 5.380691\, -13.458055)" +FJI,Fiji,"BBOX(-180.000000\, 180.000000\, -16.153473\, -19.162782)" +GAB,Gabon,"BBOX(8.700836\, 14.519582\, 2.317900\, -3.925282)" +NAM,Namibia,"BBOX(11.716391\, 25.264427\, -16.954173\, -28.961873)" +NZL,New Zealand,"BBOX(-176.848755\, 178.841063\, -34.414718\, -52.578055)" +IOT,British Indian Ocean Territory,"BBOX(72.357900\, 72.494282\, -7.233473\, -7.436246)" +REU,Glorioso Is.,"BBOX(47.279091\, 47.303054\, -11.554100\, -11.577782)" +MDG,Madagascar,"BBOX(43.236827\, 50.501391\, -11.945555\, -25.588336)" +MUS,Mauritius,"BBOX(57.306309\, 63.495754\, -19.673336\, -20.520555)" +MYT,Mayotte,"BBOX(45.039163\, 45.293345\, -12.662500\, -12.992500)" +REU,Reunion,"BBOX(55.220554\, 55.853054\, -20.856527\, -21.373891)" 
+SYC,Seychelles,"BBOX(46.205691\, 55.540554\, -4.551664\, -9.463055)" +CXR,Christmas I.,"BBOX(105.629000\, 105.751900\, -10.384082\, -10.510973)" +CCK,Cocos Is.,"BBOX(96.817491\, 96.864845\, -12.130418\, -12.199446)" +IDN,Indonesia,"BBOX(95.210945\, 141.007018\, 5.913473\, -10.929655)" +TLS,Timor Leste,"BBOX(124.046100\, 127.308591\, -8.140000\, -9.463627)" +AUS,Australia,"BBOX(112.907209\, 158.960372\, -10.135691\, -54.753891)" +NRU,Nauru,"BBOX(166.904418\, 166.957045\, -0.493336\, -0.552218)" +NCL,New Caledonia,"BBOX(163.982745\, 168.130509\, -20.087918\, -22.673891)" +NFK,Norfolk I.,"BBOX(167.910945\, 167.998872\, -29.000555\, -29.081109)" +PNG,Papua New Guinea,"BBOX(140.858854\, 155.966845\, -1.355282\, -11.642500)" +SLB,Solomon Is.,"BBOX(155.671300\, 166.931836\, -6.605518\, -11.845836)" +TUV,Tuvalu,"BBOX(176.295254\, 179.232281\, -6.089446\, -8.561291)" +VUT,Vanuatu,"BBOX(166.521636\, 169.893863\, -13.707218\, -20.254173)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv new file mode 100644 index 0000000000000..aa540d40ad604 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/countries_bbox_web.csv @@ -0,0 +1,249 @@ +id:keyword,name:keyword,shape:cartesian_shape +FLK, Falkland Is., "BBOX(-6806970.344651548\, -6426830.424971599\, -6665538.61144021\, -6862393.473674134)" +GUF, French Guiana, "BBOX(-6078465.2067807885\, -5749435.182262659\, 641770.3972926841\, 235324.11002137093)" +GUY, Guyana, "BBOX(-6833873.148626795\, -6286282.4434172455\, 953676.0708782381\, 132131.5480264357)" +PCN, Pitcairn Is., "BBOX(-1.448322847021477E7\, -1.428074532961791E7\, -2793163.209148463\, -2885847.742584221)" +SGS, South Georgia & the South Sandwich Is., "BBOX(-4232785.044058981\, -2921178.2834205604\, -7168210.949791082\, -8072797.261021951)" +SHN, St. 
Helena, "BBOX(-644849.5424266771\, -628429.9175369549\, -1793579.7338931332\, -1807264.3754193506)" +SUR, Suriname, "BBOX(-6464478.676752644\, -6009707.164829022\, 669343.5434865113\, 204444.85915446977)" +TTO, Trinidad & Tobago, "BBOX(-6893080.980145244\, -6737148.644965401\, 1271316.8958092\, 1123450.7268402777)" +VEN, Venezuela, "BBOX(-8168408.718739186\, -6657245.629555437\, 1368193.4618250781\, 72266.15206230174)" +ASM, American Samoa, "BBOX(-1.901595464265674E7\, -1.8986860848464E7\, -1603409.0061145446\, -1617338.5456514952)" +COK, Cook Is., "BBOX(-1.8462153311737206E7\, -1.755550270221884E7\, -1218650.60324631\, -2504423.3700605934)" +PYF, French Polynesia, "BBOX(-1.686465494432737E7\, -1.5452231241588091E7\, -981029.2116948966\, -2022435.6471389162)" +UMI, Jarvis I., "BBOX(-1.781614615792593E7\, -1.7812172052105166E7\, -41668.183668037316\, -44311.636360225275)" +NIU, Niue, "BBOX(-1.8918996368064713E7\, -1.889999624605927E7\, -2150619.798091522\, -2172080.175292089)" +WSM, Samoa, "BBOX(-1.9233784622210693E7\, -1.9083411248441823E7\, -1512399.049561015\, -1580814.199108954)" +TKL, Tokelau, "BBOX(-1.913167025144482E7\, -1.912956030181662E7\, -1025256.50252298\, -1030699.159919998)" +TON, Tonga, "BBOX(-1.9520985902791113E7\, -1.9359219424419094E7\, -2104146.80131666\, -2423871.209298853)" +WLF, Wallis & Futuna, "BBOX(-1.98360504519132E7\, -1.9605804230316367E7\, -1484290.6690231054\, -1611402.1249494848)" +ARG, Argentina, "BBOX(-8191144.166257678\, -5972291.682103194\, -2485194.106818803\, -7371901.253043402)" +BOL, Bolivia, "BBOX(-7754091.711639628\, -6403221.564728467\, -1082644.4605265881\, -2620063.8163838163)" +BRA, Brazil, "BBOX(-8238153.385337716\, -3873129.9144329783\, 587785.5079629741\, -3994093.243498929)" +CHL, Chile, "BBOX(-1.2183485121489162E7\, -7393910.374780716\, -1979723.0325789037\, -7538976.386388264)" +ECU, Ecuador, "BBOX(-1.0203977668829728E7\, -8373100.994630531\, 160069.96058917182\, -557339.7863215066)" +PRY, Paraguay, 
"BBOX(-6973472.910758704\, -6038403.325800699\, -2189911.7242244524\, -3196717.5348766074)" +PER, Peru, "BBOX(-9056413.424871765\, -7644744.579599449\, -4104.683866786337\, -2078385.864447083)" +URY, Uruguay, "BBOX(-6505356.195641661\, -5910875.717165678\, -3515982.318158614\, -4156248.8527274607)" +UMI, Baker I., "BBOX(-1.964428949334857E7\, -1.9642975923357394E7\, 24776.775336047573\, 23965.139003268785)" +CAN, Canada, "BBOX(-1.5696381156263582E7\, -5857338.166548977\, 1.7926778413967136E7\, 5112502.227274475)" +GTM, Guatemala, "BBOX(-1.0268864798128676E7\, -9820019.490616102\, 2016620.2477192462\, 1545072.9951440636)" +UMI, Howland I., "BBOX(-1.966381793765724E7\, -1.9662483105643325E7\, 90016.93033465231\, 87976.57940884378)" +UMI, Johnston Atoll, "BBOX(-1.8872988022526257E7\, -1.8871317228289172E7\, 1889449.6904405674\, 1888739.592498257)" +MEX, Mexico, "BBOX(-1.3180691242448486E7\, -9655698.786528189\, 3857992.7910224693\, 1637455.8925958527)" +UMI, Midway Is., "BBOX(-1.9747615131493594E7\, -1.974368555346914E7\, 3276930.956339718\, 3272211.297114333)" +BRB, Barbados, "BBOX(-6641259.148804331\, -6615392.506649243\, 1498269.4980028346\, 1465508.5364990495)" +DMA, Dominica, "BBOX(-6845190.333337227\, -6818396.733782433\, 1762138.8493679555\, 1712035.77580254)" +GRD, Grenada, "BBOX(-6877894.997852321\, -6856878.879868893\, 1372710.0161931934\, 1345360.731534649)" +GLP, Guadeloupe, "BBOX(-6879111.38592805\, -6811314.810418132\, 1864198.7087877272\, 1789672.9198651556)" +MTQ, Martinique, "BBOX(-6816263.407061167\, -6770111.459379609\, 1675390.1030315096\, 1620466.564996925)" +LCA, St. Lucia, "BBOX(-6799347.965159521\, -6776915.084016965\, 1586760.2747788534\, 1540902.846138527)" +SPM, St. Pierre & Miquelon, "BBOX(-6278172.373236121\, -6250088.469463722\, 5964272.744483719\, 5900906.394026551)" +VCT, St. 
Vincent & the Grenadines, "BBOX(-6821674.647507875\, -6803878.668434177\, 1503545.1028787405\, 1474620.605161206)" +ABW, Aruba, "BBOX(-7799006.120542209\, -7778434.278646477\, 1417237.7724451458\, 1392531.3743975367)" +BMU, Bermuda, "BBOX(-7216070.475135298\, -7199789.443011595\, 3813230.825275473\, 3797561.1925476543)" +DOM, Dominican Republic, "BBOX(-8015344.418919742\, -7605673.442087284\, 2264838.2331280783\, 1991268.1942175906)" +HTI, Haiti, "BBOX(-8289716.573465983\, -7973724.065068766\, 2283868.061303094\, 2040215.3097965734)" +JAM, Jamaica, "BBOX(-8724542.638268478\, -8484896.042272912\, 2098797.886578782\, 2002138.6713165536)" +ANT, Netherlands Antilles, "BBOX(-7699258.7361087445\, -7591201.908286172\, 1389429.1415046235\, 1348047.674912462)" +BHS, The Bahamas, "BBOX(-8791890.930189032\, -8097256.305860282\, 3114624.5106054945\, 2381778.6607825435)" +TCA, Turks & Caicos Is., "BBOX(-8018505.892457832\, -7917885.206619215\, 2506456.133236025\, 2443216.1674464582)" +BLZ, Belize, "BBOX(-9931524.217026532\, -9771579.370801603\, 2094970.9791089285\, 1791970.7485571986)" +CYM, Cayman Is., "BBOX(-9061499.6124054\, -9027238.590089742\, 2196677.690165189\, 2186160.351965059)" +COL, Colombia, "BBOX(-9097045.039005652\, -7443984.998678304\, 1412960.1248500098\, -472076.97756910085)" +CRI, Costa Rica, "BBOX(-9563612.298130559\, -9190693.005900422\, 1256252.842749445\, 896349.8334170822)" +CUB, Cuba, "BBOX(-9456916.57372173\, -8252253.557317591\, 2655499.846135876\, 2251949.753820664)" +SLV, El Salvador, "BBOX(-1.0030783799451409E7\, -9762126.342283737\, 1623823.8238794443\, 1477605.2302434247)" +HND, Honduras, "BBOX(-9946451.158864416\, -9254195.76601206\, 1855249.5859095547\, 1458038.3723417278)" +NIC, Nicaragua, "BBOX(-9761586.888031427\, -9254195.76601206\, 1691760.81737009\, 1199200.9443015517)" +PAN, Panama, "BBOX(-9242889.713250706\, -8593679.45241179\, 1075976.1383535631\, 804303.6245583462)" +AIA, Anguilla, "BBOX(-7031805.325801677\, -7010089.898777183\, 
2069525.485454939\, 2056805.549131826)" +ATG, Antigua & Barbuda, "BBOX(-6889686.737551939\, -6864733.02654072\, 2005303.4210994085\, 1919628.1877410556)" +VGB, British Virgin Is., "BBOX(-7202202.070335221\, -7160573.590161418\, 2096726.335695059\, 2082531.6290789556)" +MSR, Montserrat, "BBOX(-6928185.136284053\, -6917269.703615838\, 1898992.8327456792\, 1882606.3105989075)" +PRI, Puerto Rico, "BBOX(-7488061.394454311\, -7269287.202979579\, 2098439.2297828426\, 2028446.302847273)" +KNA, St. Kitts & Nevis, "BBOX(-6997852.881114455\, -6971105.813106805\, 1968620.0064461157\, 1945153.7466145495)" +VIR, Virgin Is., "BBOX(-7238383.9104642505\, -7187072.749663104\, 2082975.2861753216\, 1999737.0895242055)" +FRO, Faroe Is., "BBOX(-827490.42907036\, -711300.1539736006\, 8944413.838654397\, 8715539.142798016)" +GRL, Greenland, "BBOX(-8132290.553358883\, -1353381.9599010698\, 1.841838614386466E7\, 8353191.775986784)" +XGK, Guernsey, "BBOX(-297068.19496499473\, -278407.0408089712\, 6361534.846607885\, 6346855.715083607)" +ISL, Iceland, "BBOX(-2731602.192501422\, -1502751.454502109\, 1.0025136653899286E7\, 9196525.03584683)" +IRL, Ireland, "BBOX(-1166041.2756762397\, -669370.2206187705\, 7435966.643781227\, 6700487.126114637)" +XIM, Isle of Man, "BBOX(-532903.6568742928\, -479640.2861633771\, 7249411.799394163\, 7180682.877256964)" +SJM, Jan Mayen, "BBOX(-1015223.6258196725\, -882597.5845070281\, 1.1464383304063711E7\, 1.1335539300648466E7)" +XJE, Jersey, "BBOX(-250175.41607230977\, -224308.77391722222\, 6319282.822387621\, 6303377.056271344)" +GBR, United Kingdom, "BBOX(-909665.4752870986\, 194747.32654372943\, 8589937.148187652\, 6438533.511709376)" +CPV, Cape Verde, "BBOX(-2823124.068441826\, -2523179.7117936057\, 1943228.8819694468\, 1667440.6983404886)" +CIV, Cote d'Ivoire, "BBOX(-958058.0616790326\, -276938.62540612154\, 1202097.1729137793\, 484115.97315150854)" +GHA, Ghana, "BBOX(-361664.8917125052\, 133893.0797566771\, 1249767.3181259448\, 526814.3511759888)" +GIB, 
Gibraltar, "BBOX(-596246.4508776823\, -593834.8254294725\, 4323115.767768943\, 4316053.421468498)" +LBR, Liberia, "BBOX(-1279319.9894917065\, -820246.5358469777\, 951144.4190395237\, 483992.16413836647)" +MAR, Morocco, "BBOX(-1466630.283495554\, -112634.06264437255\, 4289504.155676036\, 3206707.2043454945)" +PRT, Portugal, "BBOX(-3483078.5525721395\, -689118.2982827483\, 5183576.317394064\, 3847286.4078652565)" +ESP, Spain, "BBOX(-2022660.0079814764\, 480560.1191156738\, 5429039.221465501\, 3203347.2301618545)" +ESH, Western Sahara, "BBOX(-1903733.2771624175\, -964738.2330011163\, 3207048.827624554\, 2363772.158427126)" +BFA, Burkina Faso, "BBOX(-614576.7635071143\, 266936.0125622843\, 1698741.2811715933\, 1050643.0120585556)" +GIN, Guinea, "BBOX(-1678791.0953426699\, -851969.5850923934\, 1422911.1290510038\, 802936.7522689679)" +GNB, Guinea-Bissau, "BBOX(-1861013.9772984823\, -1518830.9983475052\, 1423734.7230846898\, 1223613.9918118552)" +MLI, Mali, "BBOX(-1363089.019496892\, 473262.6812172274\, 2875778.1558879707\, 1134962.1365298633)" +MRT, Mauritania, "BBOX(-1900842.0873479373\, -535013.6065024948\, 3159807.24053085\, 1657600.8186799039)" +SEN, Senegal, "BBOX(-1951740.3641577882\, -1265694.4838205066\, 1884840.6777415504\, 1380068.3247828495)" +SLE, Sierra Leone, "BBOX(-1480060.423460439\, -1142617.6510657615\, 1118607.3838558097\, 772615.2434245716)" +GMB, The Gambia, "BBOX(-1872579.0705148762\, -1536054.1273216614\, 1554306.33090056\, 1466584.8753009895)" +DJI, Djibouti, "BBOX(4648685.682234346\, 4833537.819242839\, 1426428.7393574219\, 1225554.7892715929)" +ERI, Eritrea, "BBOX(4056847.594510955\, 4800250.285874032\, 2036949.5002702742\, 1387149.8027029647)" +ETH, Ethiopia, "BBOX(3672630.3758422886\, 5342026.99671924\, 1675790.1336981696\, 379451.74027328007)" +MNG, Mongolia, "BBOX(9769520.962097632\, 1.3350714510090472E7\, 6825981.925445475\, 5099261.916823782)" +SDN, Sudan, "BBOX(2430004.2961371886\, 4297767.240203056\, 2539428.7064047027\, 
389123.6754710965)" +UGA, Uganda, "BBOX(3292196.0161092333\, 3897263.9800336002\, 470504.09041435266\, -164337.88255462408)" +ISR, Gaza Strip, "BBOX(3808981.5012748297\, 3847078.1479647276\, 3710408.4677697835\, 3660903.6805555364)" +IRQ, Iraq, "BBOX(4318606.2488766555\, 5405751.393937016\, 4492721.642260634\, 3383496.8234396563)" +ISR, Israel, "BBOX(3814649.7784257433\, 3972002.8842663835\, 3931233.3769460395\, 3437740.2376509146)" +JOR, Jordan, "BBOX(3891775.929138256\, 4374979.440881939\, 3945530.7721081185\, 3399709.663800458)" +KAZ, Kazakhstan, "BBOX(5176263.146752886\, 9723558.146230904\, 7448249.257062752\, 4952703.862043582)" +NOR, Norway, "BBOX(533173.8292784104\, 3459090.2041849457\, 1.1455379410923388E7\, 7964779.911100031)" +RUS, Russia, "BBOX(-2.003750834E7\, 2.003750834E7\, 1.6850434409817755E7\, 5041380.846897432)" +SWE, Sweden, "BBOX(1237130.9043623458\, 2690259.1355019724\, 1.0769543191624273E7\, 7427971.135671626)" +ISR, West Bank, "BBOX(3883735.6562778493\, 3959702.080535439\, 3835248.5789866336\, 3678377.284759022)" +DZA, Algeria, "BBOX(-964830.2942199894\, 1334328.0705815821\, 4451638.686907341\, 2152156.534692522)" +AND, Andorra, "BBOX(158228.52231611632\, 198339.94046960064\, 5259751.808527718\, 5226573.156424563)" +CMR, Cameroon, "BBOX(946478.719567819\, 1804154.9870354445\, 1469444.988943757\, 184166.28005485257)" +CAF, Central African Republic, "BBOX(1605103.603700283\, 3056801.8246613783\, 1232201.6067875316\, 247331.9412217624)" +LBY, Libya, "BBOX(1036539.304552783\, 2799870.317374274\, 3918041.4975678376\, 2213781.647695001)" +MCO, Monaco, "BBOX(822751.2243894777\, 828138.0858677052\, 5429655.8071539095\, 5423375.498489419)" +TUN, Tunisia, "BBOX(834029.8925561006\, 1289264.82751983\, 4486662.225217784\, 3533714.341264127)" +BEN, Benin, "BBOX(86457.72966594121\, 429136.6369483333\, 1390883.792858654\, 693627.7186615759)" +TCD, Chad, "BBOX(1498576.8622784517\, 2671973.3506688518\, 2686597.2252112613\, 832635.3730826946)" +GNQ, Equatorial 
Guinea, "BBOX(937773.5353889967\, 1263909.364466394\, 419234.1992921709\, 103548.81812163288)" +KIR, Kiribati, "BBOX(-1.7541914599896543E7\, 1.9252428633165136E7\, 226366.04306531145\, 148735.3163895852)" +NER, Niger, "BBOX(18552.840291496777\, 1780740.379303719\, 2695306.478633392\, 1310820.5810745189)" +NGA, Nigeria, "BBOX(299727.7289191666\, 1630792.0233506353\, 1561771.5570046515\, 476092.4293577717)" +STP, Sao Tome & Principe, "BBOX(719695.6473290791\, 830830.0137936934\, 189409.56079307984\, 2041.1542177410504)" +TGO, Togo, "BBOX(-16671.65221684311\, 200130.18052028888\, 1247820.9113916112\, 680396.3710024672)" +ALB, Albania, "BBOX(2147190.0053688344\, 2343645.64081804\, 5260414.963633992\, 4814487.957249004)" +BIH, Bosnia & Herzegovina, "BBOX(1752234.5746612719\, 2184064.14141101\, 5663486.702317411\, 5246118.059706764)" +HRV, Croatia, "BBOX(1503346.4571803163\, 2162381.1083583334\, 5866635.618622956\, 5221085.75286942)" +ITA, Italy, "BBOX(737376.1880908412\, 2061018.5894331736\, 5957525.94908941\, 4390316.944679211)" +MKD, Macedonia, "BBOX(2277465.201675234\, 2563796.186476749\, 5214901.594868669\, 4991108.7995952675)" +MLT, Malta, "BBOX(1595108.1153038554\, 1621924.980632222\, 4299511.834205549\, 4273136.461790226)" +SMR, San Marino, "BBOX(1381134.799507896\, 1392730.2829452723\, 5463410.973754562\, 5449776.352704761)" +SMN, Serbia & Montenegro, "BBOX(2054214.9647958176\, 2560904.8853427777\, 5809419.7157107135\, 5138387.144313233)" +VTC, Vatican City, "BBOX(1385312.3973578045\, 1386786.8240131561\, 5147266.721875869\, 5146144.937762506)" +BGR, Bulgaria, "BBOX(2489690.801465982\, 3184309.173149079\, 5500283.923251328\, 5048257.112102198)" +CYP, Cyprus, "BBOX(3592264.716628652\, 3850099.91554189\, 4257858.611081361\, 4115102.5028513763)" +EGY, Egypt, "BBOX(2750348.3947484\, 4107224.6734649837\, 3717055.3733837567\, 2510824.567439936)" +GEO, Georgia, "BBOX(4453109.470762285\, 5199824.4735734565\, 5401399.644378745\, 5019430.87461186)" +GRC, Greece, 
"BBOX(2186314.7988755554\, 3143444.7899599737\, 5123271.623236523\, 4154446.48763015)" +LBN, Lebanon, "BBOX(3907406.1875188733\, 4076936.6437751846\, 4116080.386414876\, 3903547.2121638493)" +SYR, Syria, "BBOX(3964583.8854840077\, 4717533.78165415\, 4479682.761680629\, 3804547.447187875)" +TUR, Turkey, "BBOX(2857106.79203054\, 4989400.245782474\, 5177469.827842194\, 4275668.354346954)" +AUT, Austria, "BBOX(1061272.4916527711\, 1910952.9027368103\, 6278042.62617315\, 5845892.142474166)" +CZE, Czech Republic, "BBOX(1346264.5256192111\, 2098619.3077916563\, 6630584.029505155\, 6204126.892396778)" +DNK, Denmark, "BBOX(900899.5106663116\, 1686397.1108695522\, 7914142.641677729\, 7277306.821832056)" +HUN, Hungary, "BBOX(1793557.3715133998\, 2548637.4774590665\, 6203250.422795402\, 5740109.762720737)" +POL, Poland, "BBOX(1574907.6352293568\, 2687639.1199670266\, 7330108.850656106\, 6275356.531185668)" +SVK, Slovakia, "BBOX(1875145.4300552672\, 2511151.0842176126\, 6377430.961535088\, 6063294.76382884)" +SVN, Slovenia, "BBOX(1489841.399198138\, 1848779.9652620046\, 5921897.448055978\, 5688808.783113411)" +SJM, Svalbard, "BBOX(1167509.6910790894\, 3744509.3710375\, 1.6048121551074298E7\, 1.2655555793739378E7)" +BEL, Belgium, "BBOX(282936.63088871894\, 712244.3658943777\, 6710441.719074484\, 6361653.309031685)" +FRA, France, "BBOX(-533251.7529219548\, 1064461.2384661005\, 6637425.700005567\, 5066318.240535327)" +DEU, Germany, "BBOX(652888.8134116667\, 1673556.9642057894\, 7372844.587967681\, 5987030.890923241)" +LIE, Liechtenstein, "BBOX(1054711.6548248013\, 1072439.8403286163\, 5987002.506696636\, 5951457.074129165)" +LUX, Luxembourg, "BBOX(638355.4972931738\, 726251.3634604733\, 6477821.694262034\, 6351301.791746342)" +NLD, Netherlands, "BBOX(375242.7526416523\, 802721.8423723045\, 7069632.465484033\, 6577873.226207013)" +CHE, Switzerland, "BBOX(664244.403346417\, 1167542.0850509058\, 6074750.670815664\, 5753058.221661312)" +USA, United States, 
"BBOX(-1.9838976150769826E7\, 2.001256564961837E7\, 1.1523520412740182E7\, 2146164.589200235)" +BLR, Belarus, "BBOX(2578760.5316635333\, 3644606.1393169463\, 7591830.885400406\, 6665963.6751351105)" +EST, Estonia, "BBOX(2430923.1272140685\, 3138551.853062327\, 8325466.382266233\, 7867699.765386352)" +FIN, Finland, "BBOX(2171998.1104861163\, 3515688.0389226186\, 1.1097617254588177E7\, 8356849.0793245975)" +LVA, Latvia, "BBOX(2334214.876198328\, 3143213.0227801744\, 7984826.971795753\, 7493955.154644284)" +LTU, Lithuania, "BBOX(2331345.838962512\, 2984815.5174770574\, 7648495.086573079\, 7149414.5404388225)" +MDA, Moldova, "BBOX(2964993.634990694\, 3353912.54367185\, 6185122.9269956285\, 5692430.167578349)" +ROM, Romania, "BBOX(2255447.2082748064\, 3303096.1980072116\, 6150868.213605207\, 5407332.237900151)" +UKR, Ukraine, "BBOX(2465887.5773919513\, 4472677.433490184\, 6868872.82154549\, 5524305.8506691335)" +IND, India, "BBOX(7585780.649085295\, 1.0840351679187058E7\, 4232806.675603967\, 752682.9865532124)" +MDV, Maldives, "BBOX(8111115.582462115\, 8197263.621304713\, 784297.2010665077\, -71431.20290758506)" +OMN, Oman, "BBOX(5788534.594925483\, 6662146.69277591\, 3044819.2631402686\, 1879282.0779841878)" +SOM, Somalia, "BBOX(4562831.081569439\, 5723081.7399744\, 1343337.2289440092\, -186472.5685638059)" +LKA, Sri Lanka, "BBOX(8871728.267099438\, 9116138.224105384\, 1099474.3430723047\, 659969.3086218301)" +TKM, Turkmenistan, "BBOX(5705144.162508433\, 7421768.6339453105\, 5280998.579824433\, 4183738.4781891424)" +UZB, Uzbekistan, "BBOX(6233612.182953193\, 8144973.85086014\, 5711801.139928842\, 4464923.610179015)" +YEM, Yemen, "BBOX(4737309.24391286\, 6063959.275257026\, 2154858.799301538\, 1362182.6880406907)" +ARM, Armenia, "BBOX(4837295.297334552\, 5189774.327307057\, 5056256.290729958\, 4698942.432854185)" +AZE, Azerbaijan, "BBOX(4984760.226767874\, 5752658.326798638\, 5268048.77475221\, 4616618.723595905)" +BHR, Bahrain, "BBOX(5616438.669684706\, 
5654628.379468894\, 3034905.550106453\, 2946160.3652355284)" +IRN, Iran, "BBOX(4901948.6557028685\, 7049893.741177648\, 4833901.247983729\, 2885079.0840316075)" +KWT, Kuwait, "BBOX(5181582.214661229\, 5389710.255315938\, 3514372.934498193\, 3317085.938189461)" +QAT, Qatar, "BBOX(5649679.671506368\, 5745847.577713873\, 3017981.013632691\, 2821312.488451719)" +SAU, Saudi Arabia, "BBOX(3848553.5764954956\, 6196722.907460272\, 3783666.794569951\, 1848481.0463722278)" +ARE, United Arab Emirates, "BBOX(5742229.694263595\, 6276378.014364274\, 3009473.8025495554\, 2587735.5585281393)" +AFG, Afghanistan, "BBOX(6735292.615095663\, 8339581.582762433\, 4646317.28372925\, 3427436.851842879)" +KGZ, Kyrgyzstan, "BBOX(7708819.076615721\, 8936904.82707441\, 5345044.727405903\, 4749710.205362992)" +NPL, Nepal, "BBOX(8911370.139640821\, 9817772.840653224\, 3558261.041954822\, 3044776.39805181)" +PAK, Pakistan, "BBOX(6775605.521527455\, 8663319.92396695\, 4447583.65883328\, 2715440.846640232)" +TJK, Tajikistan, "BBOX(7499004.100397766\, 8369832.209103333\, 5019609.3336218465\, 4393464.385496015)" +BGD, Bangladesh, "BBOX(9800998.997143846\, 1.0315904296110207E7\, 3076839.5287209633\, 2361476.7409209567)" +BTN, Bhutan, "BBOX(9879820.321061922\, 1.025410784115321E7\, 3290010.9896438504\, 3086490.161301852)" +BRN, Brunei, "BBOX(1.2701006428488798E7\, 1.2841845733150518E7\, 563234.0022074429\, 447670.0898939893)" +CHN, China, "BBOX(8195345.9204370845\, 1.5002356674063785E7\, 7086089.890077106\, 2057325.3856844143)" +JPN, Japan, "BBOX(1.3767868049134541E7\, 1.623176311896106E7\, 5698420.16133248\, 2784071.2548644035)" +PRK, North Korea, "BBOX(1.3839679250759868E7\, 1.4549170017730366E7\, 5312900.3745006835\, 4533106.558340659)" +PLW, Palau, "BBOX(1.4967181830048332E7\, 1.4990157059749957E7\, 863059.693444481\, 815429.4880146481)" +PHL, Philippines, "BBOX(1.3018814446461111E7\, 1.4092828900986778E7\, 2201037.2202695687\, 562799.2811739098)" +KOR, South Korea, "BBOX(1.4037278471337833E7\, 
1.4425544602525068E7\, 4668132.414354527\, 3920844.3714562915)" +KHM, Cambodia, "BBOX(1.139316126476325E7\, 1.1982027233402364E7\, 1655642.1223870981\, 1166706.2324655629)" +LAO, Laos, "BBOX(1.1142120562289124E7\, 1.1988580834463434E7\, 2571654.2509495416\, 1565804.2404149454)" +MYS, Malaysia, "BBOX(1.1092089575631922E7\, 1.32777233218629E7\, 820779.1279511156\, 94934.7631846226)" +MMR, Myanmar, "BBOX(1.0264212645289583E7\, 1.126212909591956E7\, 3318054.720285839\, 1100761.292465509)" +SGP, Singapore, "BBOX(1.1537257221127674E7\, 1.157699827933534E7\, 160905.1210847127\, 140165.52511697204)" +THA, Thailand, "BBOX(1.0836648747645825E7\, 1.1759712080245482E7\, 2326960.8760532974\, 628128.2178646458)" +VNM, Vietnam, "BBOX(1.137025572106285E7\, 1.2185570803468373E7\, 2671268.1479721097\, 956373.5794062541)" +GUM, Guam, "BBOX(1.610060037235469E7\, 1.613612854443387E7\, 1534354.7088998647\, 1486593.2644101644)" +MHL, Marshall Is., "BBOX(1.8069932221681617E7\, 1.9077718703641918E7\, 1642457.1731015244\, 624414.5801310536)" +FSM, Micronesia, "BBOX(1.76018490137313E7\, 1.8149851601056725E7\, 778674.0289479959\, 586550.7704269526)" +MNP, Northern Mariana Is., "BBOX(1.6205076831395375E7\, 1.6232394634432243E7\, 1720127.7032804906\, 1678605.9653024632)" +UMI, Wake I., "BBOX(1.854682692392445E7\, 1.8552751235904157E7\, 2193187.709933591\, 2187863.8226788775)" +BWA, Botswana, "BBOX(2225956.6714169392\, 3269856.198060967\, -2012057.3125287183\, -3107932.575048184)" +BDI, Burundi, "BBOX(3226595.4401938887\, 3434561.510989516\, -256277.86419111618\, -495653.34463959694)" +ATF, French Southern & Antarctic Lands, "BBOX(5749744.761766512\, 7855537.163585416\, -5833010.924598094\, -6398787.743617378)" +HMD, Heard I. 
& McDonald Is., "BBOX(8152450.513138738\, 8212470.976939865\, -6976553.288377103\, -7019975.393962887)" +KEN, Kenya, "BBOX(3774534.2414511004\, 4664861.406119774\, 515133.4762737857\, -520395.9201280237)" +RWA, Rwanda, "BBOX(3212062.1240753955\, 3439022.3056239635\, -117387.0182772328\, -314659.7811132031)" +TZA, Tanzania, "BBOX(3266205.9206388732\, 4501404.98655826\, -111015.40498408281\, -1316180.4208213643)" +ZMB, Zambia, "BBOX(2448627.045068894\, 3751720.8702890654\, -915014.476700008\, -2046319.4302683398)" +ZWE, Zimbabwe, "BBOX(2809472.180051312\, 3681512.6693309383\, -1760356.671722378\, -2561396.0054164226)" +ATA, Antarctica, "BBOX(-2.003750834E7\, 2.003750834E7\, -8512662.881033322\, -4.748140766343476E9)" +NOR, Bouvet I., "BBOX(372070.1471544857\, 387855.25094677455\, -7243144.612387524\, -7258293.454237509)" +COM, Comoros, "BBOX(4810563.480097139\, 4957103.455881589\, -1273745.795821429\, -1389333.8616461232)" +REU, Juan De Nova I., "BBOX(4755993.663842456\, 4760121.613199477\, -1926881.0822095312\, -1929687.4249448022)" +LSO, Lesotho, "BBOX(3007181.718244638\, 3278977.271857335\, -3321117.2692412077\, -3587446.106149188)" +MWI, Malawi, "BBOX(3638129.460024005\, 3998703.3094073967\, -1048497.2089351554\, -1936578.3607502843)" +MOZ, Mozambique, "BBOX(3363297.7786198338\, 4546968.054133605\, -1172181.8581435068\, -3106026.6491282047)" +ZAF, South Africa, "BBOX(1834915.5679635953\, 4218142.412200545\, -2527908.4975596936\, -5937134.146607068)" +SWZ, Swaziland, "BBOX(3428455.080322901\, 3577073.7249586442\, -2965472.9128583763\, -3163056.5390926218)" +AGO, Angola, "BBOX(1305916.2195893514\, 2681068.153065396\, -489058.770192694\, -2039467.1713562359)" +COG, Congo, "BBOX(1240172.93208683\, 2075397.0601399948\, 413407.92638141196\, -558981.4471095677)" +ZAR, Congo\, DRC, "BBOX(1359717.9313576685\, 3484608.750292371\, 599858.1461695591\, -1512112.8916449302)" +FJI, Fiji, "BBOX(-2.003750834E7\, 2.003750834E7\, -1822502.649701532\, -2174110.2636207)" +GAB, Gabon, 
"BBOX(968572.632860957\, 1616312.474546188\, 258097.85802697268\, -437302.607003333)" +NAM, Namibia, "BBOX(1304262.6798733384\, 2812423.14843234\, -1915491.159689654\, -3370794.2160844747)" +NZL, New Zealand, "BBOX(-1.9686713351283982E7\, 1.9908496063316472E7\, -4084625.39078185\, -6905327.726548656)" +IOT, British Indian Ocean Territory, "BBOX(8054844.581749367\, 8070026.5565406205\, -807374.1159864698\, -830132.9519243974)" +REU, Glorioso Is., "BBOX(5263084.334556216\, 5265751.883513724\, -1295003.534066991\, -1297694.4422191991)" +MDG, Madagascar, "BBOX(4813101.564486872\, 5621789.129689449\, -1339512.841638736\, -2948183.285092941)" +MUS, Mauritius, "BBOX(6379309.136233983\, 7068315.001831045\, -2234372.9783939887\, -2334800.8501905375)" +MYT, Mayotte, "BBOX(5013736.69021733\, 5042032.101022207\, -1421199.6616333937\, -1458875.4272419864)" +REU, Reunion, "BBOX(6147123.9517467795\, 6217533.529663724\, -2374779.1643490326\, -2436517.3438334884)" +SYC, Seychelles, "BBOX(5143593.993155349\, 6182746.188795668\, -507222.7096158059\, -1058244.6497234497)" +CXR, Christmas I., "BBOX(1.175856649136589E7\, 1.1772247656782478E7\, -1162331.3692172004\, -1176694.9418773586)" +CCK, Cocos Is., "BBOX(1.0777673796502084E7\, 1.0782945219668373E7\, -1360554.4203425802\, -1368415.0936628287)" +IDN, Indonesia, "BBOX(1.0598833913871005E7\, 1.5696829439852942E7\, 659456.6237303711\, -1224130.4157647756)" +TLS, Timor Leste, "BBOX(1.3808748684969299E7\, 1.4171927521756383E7\, -909204.3581778448\, -1058309.2029773812)" +AUS, Australia, "BBOX(1.2568773011020126E7\, 1.7695387664886124E7\, -1134231.265244234\, -7314247.137263005)" +NRU, Nauru, "BBOX(1.8579714820321366E7\, 1.8585573231162526E7\, -54918.590898148344\, -61473.57829423625)" +NCL, New Caledonia, "BBOX(1.8254475669742182E7\, 1.871620264608858E7\, -2283448.9342597914\, -2592628.621050228)" +NFK, Norfolk I., "BBOX(1.869176089341545E7\, 1.870154888228107E7\, -3375716.673702962\, -3385973.448600687)" +PNG, Papua New Guinea, 
"BBOX(1.5680335898821346E7\, 1.7362149763616595E7\, -150883.37308403326\, -1305049.300451269)" +SLB, Solomon Is., "BBOX(1.7329249844714675E7\, 1.858276697811951E7\, -736957.2370687899\, -1328168.5471204517)" +TUV, Tuvalu, "BBOX(1.962509790181899E7\, 1.9952046251859576E7\, -679153.8120624761\, -956604.9181074377)" +VUT, Vanuatu, "BBOX(1.8537103723002467E7\, 1.8912498315429542E7\, -1540647.6688226506\, -2303165.641357482)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json new file mode 100644 index 0000000000000..eb386b84ff70b --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "geo_shape" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json new file mode 100644 index 0000000000000..303c828c84285 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mapping-countries_bbox_web.json @@ -0,0 +1,13 @@ +{ + "properties": { + "id": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "shape": { + "type": "shape" + } + } +} diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index f3cb362c40e22..754d4a0e156cf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -46,10 +46,10 @@ median_absolute_deviation|"double|integer|long|unsigned_long median_absolute_dev min |"double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" 
|"double|integer|long|unsigned_long" | "The minimum value of a numeric field." | false | false | true mv_avg |"double mv_avg(field:double|integer|long|unsigned_long)" |field |"double|integer|long|unsigned_long" | "" |double | "Converts a multivalued field into a single valued field containing the average of all of the values." | false | false | false mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false | false -mv_count |"integer mv_count(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false +mv_count |"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false | false mv_dedupe |"boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|version" | "Remove duplicate values from a multivalued field." 
| false | false | false -mv_first |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." | false | false | false -mv_last |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false +mv_first |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the first value." 
| false | false | false +mv_last |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the last value." | false | false | false mv_max |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false | false mv_median |"double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | "" |"double|integer|long|unsigned_long" | "Converts a multivalued field into a single valued field containing the median value." | false | false | false mv_min |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" |v | "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "" |"boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version" | "Reduce a multivalued field to a single valued field containing the minimum value." 
| false | false | false @@ -75,20 +75,22 @@ tau |double tau() to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | "Converts an input value to a boolean value." |false |false | false to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" |v |"cartesian_point|keyword|text" | |cartesian_point | "Converts an input value to a point value." |false |false | false +to_cartesianshape |"cartesian_shape to_cartesianshape(v:cartesian_shape|keyword|text)" |v |"cartesian_shape|keyword|text" | |cartesian_shape | "Converts an input value to a shape value." |false |false | false to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." |false |false | false to_degrees |"double to_degrees(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in radians to degrees." |false |false | false to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | "Converts an input value to a double value." 
|false |false | false to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | "Converts an input value to a date value." |false |false | false to_geopoint |"geo_point to_geopoint(v:geo_point|keyword|text)" |v |"geo_point|keyword|text" | |geo_point | "Converts an input value to a geo_point value." |false |false | false +to_geoshape |"geo_shape to_geoshape(v:geo_shape|keyword|text)" |v |"geo_shape|keyword|text" | |geo_shape | "Converts an input value to a geo_shape value." |false |false | false to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | "Converts an input value to an integer value." |false |false | false to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | "Converts an input string to an IP value." |false |false | false to_long |"long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |long | "Converts an input value to a long value." |false |false | false to_lower |"keyword|text to_lower(str:keyword|text)" |str |"keyword|text" | "The input string" |"keyword|text" | "Returns a new string representing the input string converted to lower case." |false |false | false to_radians |"double to_radians(v:double|integer|long|unsigned_long)" |v |"double|integer|long|unsigned_long" | |double | "Converts a number in degrees to radians." 
|false |false | false -to_str |"keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false -to_string |"keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_str |"keyword to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false +to_string |"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" |v |"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version" | |keyword | "Converts a field into a string." |false |false | false to_ul |"unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false to_ulong |"unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." 
|false |false | false to_unsigned_long |"unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |unsigned_long | "Converts an input value to an unsigned long value." |false |false | false @@ -139,10 +141,10 @@ double e() "double|integer|long|unsigned_long min(field:double|integer|long|unsigned_long)" "double mv_avg(field:double|integer|long|unsigned_long)" "keyword mv_concat(v:text|keyword, delim:text|keyword)" -"integer mv_count(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"integer mv_count(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "boolean|date|double|integer|ip|keyword|long|text|version mv_dedupe(v:boolean|date|double|integer|ip|keyword|long|text|version)" -"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_first(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version mv_last(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version 
mv_max(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" "double|integer|long|unsigned_long mv_median(v:double|integer|long|unsigned_long)" "boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version mv_min(v:boolean|date|double|integer|ip|keyword|long|text|unsigned_long|version)" @@ -167,21 +169,23 @@ double pi() double tau() "boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" "boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" +"cartesian_point to_cartesianpoint(v:cartesian_point|keyword|text)" +"cartesian_shape to_cartesianshape(v:cartesian_shape|keyword|text)" "date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" "double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "double to_degrees(v:double|integer|long|unsigned_long)" "double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" -"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -"geo_point to_geopoint(v:geo_point|keyword|text)" +"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" +"geo_point to_geopoint(v:geo_point|keyword|text)" +"geo_shape to_geoshape(v:geo_shape|keyword|text)" "integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "ip to_ip(v:ip|keyword|text)" "long to_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "keyword|text to_lower(str:keyword|text)" "double to_radians(v:double|integer|long|unsigned_long)" -"keyword to_str(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" -"keyword to_string(v:boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword 
to_str(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" +"keyword to_string(v:boolean|cartesian_point|cartesian_shape|date|double|geo_point|geo_shape|integer|ip|keyword|long|text|unsigned_long|version)" "unsigned_long to_ul(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "unsigned_long to_ulong(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "unsigned_long to_unsigned_long(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" @@ -212,5 +216,5 @@ countFunctions#[skip:-8.12.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -86 | 86 | 86 +88 | 88 | 88 ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec new file mode 100644 index 0000000000000..71d7c0dbdcfdd --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -0,0 +1,135 @@ +############################################### +# Tests for GEO_SHAPE type +# + +convertFromString#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +row wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" +| eval pt = to_geoshape(wkt); + +wkt:keyword |pt:geo_shape +"POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" | POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) +; + +convertFromStringArray#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| eval pt = to_geoshape(wkt); + +wkt:keyword |pt:geo_shape +["POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10))", "POINT(75.8092915005895 22.727749187571)"] |[POLYGON ((30 10\, 40 40\, 20 40\, 10 20\, 30 10)), POINT(75.8092915005895 22.727749187571)] +; + +# need to work out how to upload WKT +simpleLoad#[skip:-8.12.99, reason: 
spatial type geo_shape only added in 8.13] +FROM countries_bbox | WHERE id == "ISL"; + +id:keyword| name:keyword| shape:geo_shape +ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) +; + +geo_shapeEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] + +ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| MV_EXPAND wkt +| EVAL pt = to_geoshape(wkt) +| WHERE pt == to_geoshape("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") +; + +wkt:keyword |pt:geo_shape +"POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" |POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10)) +; + +geo_shapeNotEquals#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] +| MV_EXPAND wkt +| EVAL pt = to_geoshape(wkt) +| WHERE pt != to_geoshape("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") +; + +wkt:keyword |pt:geo_shape +"POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) +; + +convertFromStringParseError#[skip:-8.12.99, reason: spatial type geo_shape only added in 8.13] +row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] +| mv_expand wkt +| eval pt = to_geoshape(wkt) +; + +warning:Line 3:13: evaluation of [to_geoshape(wkt)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: pointx +warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +wkt:keyword |pt:geo_shape +"POINTX(42.97109630194 14.7552534413725)" |null +"POINT(75.8092915005895 22.727749187571)" |POINT(75.8092915005895 22.727749187571) +"POINT(111)" |null +; + +############################################### +# Tests for CARTESIAN_SHAPE type +# + +convertCartesianShapeFromString#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] + +row wkt = "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" +| mv_expand wkt +| eval pt = to_cartesianshape(wkt) +; + +wkt:keyword |pt:cartesian_shape +"POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) +; + +convertCartesianFromStringArray#[skip:-8.12.99, reason:spatial type cartesian_shape only added in 8.13] +row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| eval pt = to_cartesianshape(wkt); + +wkt:keyword |pt:cartesian_shape +["POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] |[POLYGON ((3339584.72 1118889.97\, 4452779.63 4865942.27\, 2226389.81 4865942.27\, 1113194.90 2273030.92\, 3339584.72 1118889.97)), POINT(7580.93 2272.77)] +; + +# need to work out how to upload WKT +simpleCartesianShapeLoad#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +FROM countries_bbox_web | WHERE id == "ISL"; + +id:keyword| name:keyword|shape:cartesian_shape 
+ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9196525.03584683) +; + +cartesianshapeEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| MV_EXPAND wkt +| EVAL pt = to_cartesianshape(wkt) +| WHERE pt == to_cartesianshape("POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))") +; + +wkt:keyword |pt:cartesian_shape +"POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))" |POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97)) +; + +cartesianShapeNotEquals#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] +| MV_EXPAND wkt +| EVAL pt = to_cartesianshape(wkt) +| WHERE pt != to_cartesianshape("POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))") +; + +wkt:keyword |pt:cartesian_shape +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) +; + +convertCartesianShapeFromStringParseError#[skip:-8.12.99, reason: spatial type cartesian_shape only added in 8.13] +row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] +| mv_expand wkt +| eval pt = to_cartesianshape(wkt) +; + +warning:Line 3:13: evaluation of [to_cartesianshape(wkt)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: Unknown geometry type: pointx +warning:Line 3:13: java.lang.IllegalArgumentException: Failed to parse WKT: expected number but found: ')' + +wkt:keyword |pt:cartesian_shape +"POINTX(4297.11 -1475.53)" |null +"POINT(7580.93 2272.77)" |POINT(7580.93 2272.77) +"POINT(111)" |null +; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java new file mode 100644 index 0000000000000..5ec9dcb94f67f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeFromStringEvaluator.java @@ -0,0 +1,125 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToCartesianShape}. + * This class is generated. Do not edit it. 
+ */ +public final class ToCartesianShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToCartesianShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToCartesianShapeFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianShape.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + BytesRef 
value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToCartesianShape.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToCartesianShapeFromStringEvaluator get(DriverContext context) { + return new ToCartesianShapeFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToCartesianShapeFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java new file mode 100644 index 0000000000000..68a6087d86953 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeFromStringEvaluator.java @@ -0,0 +1,125 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToGeoShape}. + * This class is generated. Do not edit it. + */ +public final class ToGeoShapeFromStringEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToGeoShapeFromStringEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToGeoShapeFromString"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + try { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } catch (IllegalArgumentException e) { + registerException(e); + return driverContext.blockFactory().newConstantNullBlock(positionCount); + } + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + try { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } catch (IllegalArgumentException e) { + registerException(e); + builder.appendNull(); + } + } + 
return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoShape.fromKeyword(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + try { + BytesRef value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } catch (IllegalArgumentException e) { + registerException(e); + } + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToGeoShape.fromKeyword(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToGeoShapeFromStringEvaluator get(DriverContext context) { + return new ToGeoShapeFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return 
"ToGeoShapeFromStringEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java new file mode 100644 index 0000000000000..5e466ddfbfddc --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromCartesianShapeEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. + * This class is generated. Do not edit it. 
+ */ +public final class ToStringFromCartesianShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromCartesianShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToStringFromCartesianShape"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToString.fromCartesianShape(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + BytesRef value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } + if (valuesAppended == false) { + 
builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToString.fromCartesianShape(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToStringFromCartesianShapeEvaluator get(DriverContext context) { + return new ToStringFromCartesianShapeEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToStringFromCartesianShapeEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java new file mode 100644 index 0000000000000..df8e86e58fa69 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromGeoShapeEvaluator.java @@ -0,0 +1,110 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import java.lang.Override; +import java.lang.String; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.Vector; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link ToString}. + * This class is generated. Do not edit it. + */ +public final class ToStringFromGeoShapeEvaluator extends AbstractConvertFunction.AbstractEvaluator { + public ToStringFromGeoShapeEvaluator(EvalOperator.ExpressionEvaluator field, Source source, + DriverContext driverContext) { + super(driverContext, field, source); + } + + @Override + public String name() { + return "ToStringFromGeoShape"; + } + + @Override + public Block evalVector(Vector v) { + BytesRefVector vector = (BytesRefVector) v; + int positionCount = v.getPositionCount(); + BytesRef scratchPad = new BytesRef(); + if (vector.isConstant()) { + return driverContext.blockFactory().newConstantBytesRefBlockWith(evalValue(vector, 0, scratchPad), positionCount); + } + try (BytesRefBlock.Builder builder = driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + for (int p = 0; p < positionCount; p++) { + builder.appendBytesRef(evalValue(vector, p, scratchPad)); + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefVector container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToString.fromGeoShape(value); + } + + @Override + public Block evalBlock(Block b) { + BytesRefBlock block = (BytesRefBlock) b; + int positionCount = block.getPositionCount(); + try (BytesRefBlock.Builder builder = 
driverContext.blockFactory().newBytesRefBlockBuilder(positionCount)) { + BytesRef scratchPad = new BytesRef(); + for (int p = 0; p < positionCount; p++) { + int valueCount = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + int end = start + valueCount; + boolean positionOpened = false; + boolean valuesAppended = false; + for (int i = start; i < end; i++) { + BytesRef value = evalValue(block, i, scratchPad); + if (positionOpened == false && valueCount > 1) { + builder.beginPositionEntry(); + positionOpened = true; + } + builder.appendBytesRef(value); + valuesAppended = true; + } + if (valuesAppended == false) { + builder.appendNull(); + } else if (positionOpened) { + builder.endPositionEntry(); + } + } + return builder.build(); + } + } + + private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef scratchPad) { + BytesRef value = container.getBytesRef(index, scratchPad); + return ToString.fromGeoShape(value); + } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToStringFromGeoShapeEvaluator get(DriverContext context) { + return new ToStringFromGeoShapeEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToStringFromGeoShapeEvaluator[field=" + field + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java index 43a16872fd99a..79ce1754f7163 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ColumnInfo.java @@ -162,14 +162,14 @@ 
protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Pa return builder.value(UTC_DATE_TIME_FORMATTER.formatMillis(longVal)); } }; - case "geo_point" -> new PositionToXContent(block) { + case "geo_point", "geo_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { return builder.value(GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(valueIndex, scratch))); } }; - case "cartesian_point" -> new PositionToXContent(block) { + case "cartesian_point", "cartesian_shape" -> new PositionToXContent(block) { @Override protected XContentBuilder valueToXContent(XContentBuilder builder, ToXContent.Params params, int valueIndex) throws IOException { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 63686820574b5..1763e36707958 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -39,6 +39,8 @@ public class EsqlQueryResponse extends ActionResponse implements ChunkedToXConte @SuppressWarnings("this-escape") private final AbstractRefCounted counted = AbstractRefCounted.of(this::closeInternal); + public static final String DROP_NULL_COLUMNS_OPTION = "drop_null_columns"; + private final List columns; private final List pages; private final Profile profile; @@ -160,20 +162,45 @@ private Iterator asyncPropertiesOrEmpty() { @Override public Iterator toXContentChunked(ToXContent.Params params) { - final Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar); + boolean dropNullColumns = params.paramAsBoolean(DROP_NULL_COLUMNS_OPTION, false); + boolean[] nullColumns = dropNullColumns ? 
nullColumns() : null; + Iterator columnHeadings = dropNullColumns + ? Iterators.concat( + ResponseXContentUtils.allColumns(columns, "all_columns"), + ResponseXContentUtils.nonNullColumns(columns, nullColumns, "columns") + ) + : ResponseXContentUtils.allColumns(columns, "columns"); + Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar, nullColumns); Iterator profileRender = profile == null ? List.of().iterator() : ChunkedToXContentHelper.field("profile", profile, params); return Iterators.concat( ChunkedToXContentHelper.startObject(), asyncPropertiesOrEmpty(), - ResponseXContentUtils.columnHeadings(columns), + columnHeadings, ChunkedToXContentHelper.array("values", valuesIt), profileRender, ChunkedToXContentHelper.endObject() ); } + private boolean[] nullColumns() { + boolean[] nullColumns = new boolean[columns.size()]; + for (int c = 0; c < nullColumns.length; c++) { + nullColumns[c] = allColumnsAreNull(c); + } + return nullColumns; + } + + private boolean allColumnsAreNull(int c) { + for (Page page : pages) { + if (page.getBlock(c).areAllValuesNull() == false) { + return false; + } + } + return true; + } + @Override public boolean isFragment() { return false; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java index 40bc90d8c5b0c..d5dc12357f3fe 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -101,8 +101,8 @@ private static Object valueAt(String dataType, Block block, int offset, BytesRef } case "boolean" -> ((BooleanBlock) block).getBoolean(offset); case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, 
scratch)); - case "cartesian_point" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "geo_point", "geo_shape" -> GEO.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); + case "cartesian_point", "cartesian_shape" -> CARTESIAN.wkbToWkt(((BytesRefBlock) block).getBytesRef(offset, scratch)); case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; case "_source" -> { BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); @@ -161,12 +161,12 @@ static Page valuesToPage(BlockFactory blockFactory, List columns, Li throw new UncheckedIOException(e); } } - case "geo_point" -> { + case "geo_point", "geo_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here BytesRef wkb = GEO.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); } - case "cartesian_point" -> { + case "cartesian_point", "cartesian_shape" -> { // This just converts WKT to WKB, so does not need CRS knowledge, we could merge GEO and CARTESIAN here BytesRef wkb = CARTESIAN.wktToWkb(value.toString()); ((BytesRefBlock.Builder) builder).appendBytesRef(wkb); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java index e28e6beebabed..ca40faff81c55 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java @@ -23,10 +23,12 @@ */ final class ResponseXContentUtils { - /** Returns the column headings for the given columns. */ - static Iterator columnHeadings(List columns) { + /** + * Returns the column headings for the given columns. 
+ */ + static Iterator allColumns(List columns, String name) { return ChunkedToXContentHelper.singleChunk((builder, params) -> { - builder.startArray("columns"); + builder.startArray(name); for (ColumnInfo col : columns) { col.toXContent(builder, params); } @@ -34,43 +36,62 @@ static Iterator columnHeadings(List columns) { }); } + /** + * Returns the column headings for the given columns, moving the heading + * for always-null columns to a {@code null_columns} section. + */ + static Iterator nonNullColumns(List columns, boolean[] nullColumns, String name) { + return ChunkedToXContentHelper.singleChunk((builder, params) -> { + builder.startArray(name); + for (int c = 0; c < columns.size(); c++) { + if (nullColumns[c] == false) { + columns.get(c).toXContent(builder, params); + } + } + return builder.endArray(); + }); + } + /** Returns the column values for the given pages (described by the column infos). */ - static Iterator columnValues(List columns, List pages, boolean columnar) { + static Iterator columnValues( + List columns, + List pages, + boolean columnar, + boolean[] nullColumns + ) { if (pages.isEmpty()) { return Collections.emptyIterator(); } else if (columnar) { - return columnarValues(columns, pages); + return columnarValues(columns, pages, nullColumns); } else { - return rowValues(columns, pages); + return rowValues(columns, pages, nullColumns); } } /** Returns a columnar based representation of the values in the given pages (described by the column infos). 
*/ - static Iterator columnarValues(List columns, List pages) { + static Iterator columnarValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); - return Iterators.flatMap( - Iterators.forRange( - 0, - columns.size(), - column -> Iterators.concat( - Iterators.single(((builder, params) -> builder.startArray())), - Iterators.flatMap(pages.iterator(), page -> { - ColumnInfo.PositionToXContent toXContent = columns.get(column).positionToXContent(page.getBlock(column), scratch); - return Iterators.forRange( - 0, - page.getPositionCount(), - position -> (builder, params) -> toXContent.positionToXContent(builder, params, position) - ); - }), - ChunkedToXContentHelper.endArray() - ) - ), - Function.identity() - ); + return Iterators.flatMap(Iterators.forRange(0, columns.size(), column -> { + if (nullColumns != null && nullColumns[column]) { + return Collections.emptyIterator(); + } + return Iterators.concat( + Iterators.single(((builder, params) -> builder.startArray())), + Iterators.flatMap(pages.iterator(), page -> { + ColumnInfo.PositionToXContent toXContent = columns.get(column).positionToXContent(page.getBlock(column), scratch); + return Iterators.forRange( + 0, + page.getPositionCount(), + position -> (builder, params) -> toXContent.positionToXContent(builder, params, position) + ); + }), + ChunkedToXContentHelper.endArray() + ); + }), Function.identity()); } /** Returns a row based representation of the values in the given pages (described by the column infos). */ - static Iterator rowValues(List columns, List pages) { + static Iterator rowValues(List columns, List pages, boolean[] nullColumns) { final BytesRef scratch = new BytesRef(); return Iterators.flatMap(pages.iterator(), page -> { final int columnCount = columns.size(); @@ -82,7 +103,9 @@ static Iterator rowValues(List columns, List

    (builder, params) -> { builder.startArray(); for (int c = 0; c < columnCount; c++) { - toXContents[c].positionToXContent(builder, params, position); + if (nullColumns == null || nullColumns[c] == false) { + toXContents[c].positionToXContent(builder, params, position); + } } return builder.endArray(); }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 3dea461ccf8b7..0b2bad2eb22d3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -18,11 +18,11 @@ import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; @ServerlessScope(Scope.PUBLIC) @@ -60,6 +60,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override protected Set responseParams() { - return Collections.singleton(URL_PARAM_DELIMITER); + return Set.of(URL_PARAM_DELIMITER, DROP_NULL_COLUMNS_OPTION); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 6b8e7fc397865..070c0e112e051 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.xcontent.XContentParser; 
import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -65,6 +64,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override protected Set responseParams() { - return Collections.singleton(URL_PARAM_DELIMITER); + return Set.of(URL_PARAM_DELIMITER, EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index af5a0bd3f0b70..e0a36c8d81e82 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -43,7 +42,6 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; @@ -209,52 +207,35 @@ protected LogicalPlan rule(Enrich plan, AnalyzerContext context) { // the policy does not exist return plan; } - String policyName = (String) plan.policyName().fold(); - EnrichPolicyResolution policyRes = context.enrichResolution() - .resolvedPolicies() - .stream() - .filter(x -> 
x.policyName().equals(policyName)) - .findFirst() - .orElse(new EnrichPolicyResolution(policyName, null, null)); - - IndexResolution idx = policyRes.index(); - EnrichPolicy policy = policyRes.policy(); - - var policyNameExp = policy == null || idx == null - ? new UnresolvedAttribute( - plan.policyName().source(), - policyName, - null, - unresolvedPolicyError(policyName, context.enrichResolution()) - ) - : plan.policyName(); - - var matchField = policy != null && (plan.matchField() == null || plan.matchField() instanceof EmptyAttribute) - ? new UnresolvedAttribute(plan.source(), policy.getMatchField()) - : plan.matchField(); - - List enrichFields = policy == null || idx == null - ? (plan.enrichFields() == null ? List.of() : plan.enrichFields()) - : calculateEnrichFields( + final String policyName = (String) plan.policyName().fold(); + final EnrichResolution.ResolvedPolicy resolvedPolicy = context.enrichResolution().getResolvedPolicy(policyName); + if (resolvedPolicy != null) { + EnrichPolicy policy = resolvedPolicy.policy(); + var matchField = plan.matchField() == null || plan.matchField() instanceof EmptyAttribute + ? new UnresolvedAttribute(plan.source(), policy.getMatchField()) + : plan.matchField(); + List enrichFields = calculateEnrichFields( plan.source(), policyName, - mappingAsAttributes(plan.source(), idx.get().mapping()), + mappingAsAttributes(plan.source(), resolvedPolicy.mapping()), plan.enrichFields(), policy ); - - return new Enrich(plan.source(), plan.child(), plan.mode(), policyNameExp, matchField, policyRes, enrichFields); - } - - private String unresolvedPolicyError(String policyName, EnrichResolution enrichResolution) { - List potentialMatches = StringUtils.findSimilar(policyName, enrichResolution.existingPolicies()); - String msg = "unresolved enrich policy [" + policyName + "]"; - if (CollectionUtils.isEmpty(potentialMatches) == false) { - msg += ", did you mean " - + (potentialMatches.size() == 1 ? 
"[" + potentialMatches.get(0) + "]" : "any of " + potentialMatches) - + "?"; + return new Enrich( + plan.source(), + plan.child(), + plan.mode(), + plan.policyName(), + matchField, + policy, + resolvedPolicy.concreteIndices(), + enrichFields + ); + } else { + String error = context.enrichResolution().getError(policyName); + var policyNameExp = new UnresolvedAttribute(plan.policyName().source(), policyName, null, error); + return new Enrich(plan.source(), plan.child(), plan.mode(), policyNameExp, plan.matchField(), null, Map.of(), List.of()); } - return msg; } public static List calculateEnrichFields( @@ -589,6 +570,7 @@ private LogicalPlan resolveEnrich(Enrich enrich, List childrenOutput) enrich.policyName(), resolved, enrich.policy(), + enrich.concreteIndices(), enrich.enrichFields() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java index 332e5e60565b6..deb683a94a8f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/EnrichResolution.java @@ -7,8 +7,73 @@ package org.elasticsearch.xpack.esql.analysis; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.ql.type.EsField; +import org.elasticsearch.xpack.ql.util.CollectionUtils; +import org.elasticsearch.xpack.ql.util.StringUtils; +import java.util.Collection; +import java.util.List; +import java.util.Map; import java.util.Set; -public record EnrichResolution(Set resolvedPolicies, Set existingPolicies) {} +/** + * Holds the resolution results of the enrich polices. + * The results and errors are collected via {@link #addResolvedPolicy} and {@link #addError}. 
+ * And the results can be retrieved via {@link #getResolvedPolicy} and {@link #getError} + */ +public final class EnrichResolution { + + private final Map resolvedPolicies = ConcurrentCollections.newConcurrentMap(); // policy name -> resolved policy + private final Map errors = ConcurrentCollections.newConcurrentMap(); // policy to error + private final Set existingPolicies = ConcurrentCollections.newConcurrentSet(); // for suggestion + + public ResolvedPolicy getResolvedPolicy(String policyName) { + return resolvedPolicies.get(policyName); + } + + public Collection resolvedEnrichPolicies() { + return resolvedPolicies.values().stream().map(r -> r.policy).toList(); + } + + public String getError(String policyName) { + final String error = errors.get(policyName); + if (error != null) { + return error; + } + return notFoundError(policyName); + } + + public void addResolvedPolicy( + String policyName, + EnrichPolicy policy, + Map concreteIndices, + Map mapping + ) { + resolvedPolicies.put(policyName, new ResolvedPolicy(policy, concreteIndices, mapping)); + } + + public void addError(String policyName, String reason) { + errors.put(policyName, reason); + } + + public void addExistingPolicies(Set policyNames) { + existingPolicies.addAll(policyNames); + } + + private String notFoundError(String policyName) { + List potentialMatches = StringUtils.findSimilar(policyName, existingPolicies); + String msg = "unresolved enrich policy [" + policyName + "]"; + if (CollectionUtils.isEmpty(potentialMatches) == false) { + msg += ", did you mean " + + (potentialMatches.size() == 1 ? 
"[" + potentialMatches.get(0) + "]" : "any of " + potentialMatches) + + "?"; + } + return msg; + } + + public record ResolvedPolicy(EnrichPolicy policy, Map concreteIndices, Map mapping) { + + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d38dd57ff6aa8..b830e7a77f06a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -279,7 +279,9 @@ public static Failure validateBinaryComparison(BinaryComparison bc) { allowed.add(DataTypes.DATETIME); allowed.add(DataTypes.VERSION); allowed.add(EsqlDataTypes.GEO_POINT); + allowed.add(EsqlDataTypes.GEO_SHAPE); allowed.add(EsqlDataTypes.CARTESIAN_POINT); + allowed.add(EsqlDataTypes.CARTESIAN_SHAPE); if (bc instanceof Equals || bc instanceof NotEquals) { allowed.add(DataTypes.BOOLEAN); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java deleted file mode 100644 index 5014fe1fcd1df..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolution.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.enrich; - -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.ql.index.IndexResolution; - -public record EnrichPolicyResolution(String policyName, EnrichPolicy policy, IndexResolution index) {} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java index 1e21886a7ac4b..d5783e5ef0100 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java @@ -11,12 +11,16 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; @@ -25,10 +29,14 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.enrich.EnrichMetadata; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import 
org.elasticsearch.xpack.esql.session.EsqlSession; +import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolver; +import java.util.Collection; +import java.util.List; import java.util.Map; import java.util.Set; @@ -48,36 +56,69 @@ public EnrichPolicyResolver(ClusterService clusterService, TransportService tran transportService.registerRequestHandler( RESOLVE_ACTION_NAME, threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME), - ResolveRequest::new, + LookupRequest::new, new RequestHandler() ); } - public void resolvePolicy(String policyName, ActionListener listener) { + public void resolvePolicy(Collection policyNames, ActionListener listener) { + if (policyNames.isEmpty()) { + listener.onResponse(new EnrichResolution()); + return; + } transportService.sendRequest( clusterService.localNode(), RESOLVE_ACTION_NAME, - new ResolveRequest(policyName), - new ActionListenerResponseHandler<>( - listener.map(r -> r.resolution), - ResolveResponse::new, - threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME) - ) + new LookupRequest(policyNames), + new ActionListenerResponseHandler<>(listener.delegateFailureAndWrap((l, lookup) -> { + final EnrichResolution resolution = new EnrichResolution(); + resolution.addExistingPolicies(lookup.allPolicies); + try (RefCountingListener refs = new RefCountingListener(l.map(unused -> resolution))) { + for (Map.Entry e : lookup.policies.entrySet()) { + resolveOnePolicy(e.getKey(), e.getValue(), resolution, refs.acquire()); + } + } + }), LookupResponse::new, threadPool.executor(EsqlPlugin.ESQL_THREAD_POOL_NAME)) ); } + private void resolveOnePolicy(String policyName, EnrichPolicy policy, EnrichResolution resolution, ActionListener listener) { + ThreadContext threadContext = threadPool.getThreadContext(); + listener = ContextPreservingActionListener.wrapPreservingContext(listener, threadContext); + try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { + 
indexResolver.resolveAsMergedMapping( + EnrichPolicy.getBaseName(policyName), + IndexResolver.ALL_FIELDS, + false, + Map.of(), + listener.map(indexResult -> { + if (indexResult.isValid()) { + EsIndex esIndex = indexResult.get(); + Set indices = esIndex.concreteIndices(); + var concreteIndices = Map.of(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(indices, 0)); + resolution.addResolvedPolicy(policyName, policy, concreteIndices, esIndex.mapping()); + } else { + resolution.addError(policyName, indexResult.toString()); + } + return null; + }), + EsqlSession::specificValidity + ); + } + } + private static UnsupportedOperationException unsupported() { return new UnsupportedOperationException("local node transport action"); } - private static class ResolveRequest extends TransportRequest { - private final String policyName; + private static class LookupRequest extends TransportRequest { + private final Collection policyNames; - ResolveRequest(String policyName) { - this.policyName = policyName; + LookupRequest(Collection policyNames) { + this.policyNames = policyNames; } - ResolveRequest(StreamInput in) { + LookupRequest(StreamInput in) { throw unsupported(); } @@ -87,14 +128,16 @@ public void writeTo(StreamOutput out) { } } - private static class ResolveResponse extends TransportResponse { - private final EnrichPolicyResolution resolution; + private static class LookupResponse extends TransportResponse { + final Map policies; + final Set allPolicies; - ResolveResponse(EnrichPolicyResolution resolution) { - this.resolution = resolution; + LookupResponse(Map policies, Set allPolicies) { + this.policies = policies; + this.allPolicies = allPolicies; } - ResolveResponse(StreamInput in) { + LookupResponse(StreamInput in) { throw unsupported(); } @@ -104,38 +147,19 @@ public void writeTo(StreamOutput out) { } } - private class RequestHandler implements TransportRequestHandler { + private class RequestHandler implements TransportRequestHandler { @Override - public 
void messageReceived(ResolveRequest request, TransportChannel channel, Task task) throws Exception { - String policyName = request.policyName; - EnrichPolicy policy = policies().get(policyName); - ThreadContext threadContext = threadPool.getThreadContext(); - ActionListener listener = new ChannelActionListener<>(channel); - listener = ContextPreservingActionListener.wrapPreservingContext(listener, threadContext); - try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) { - indexResolver.resolveAsMergedMapping( - EnrichPolicy.getBaseName(policyName), - IndexResolver.ALL_FIELDS, - false, - Map.of(), - listener.map(indexResult -> new ResolveResponse(new EnrichPolicyResolution(policyName, policy, indexResult))), - EsqlSession::specificValidity - ); + public void messageReceived(LookupRequest request, TransportChannel channel, Task task) throws Exception { + final EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); + final Map policies = metadata == null ? Map.of() : metadata.getPolicies(); + final Map results = Maps.newMapWithExpectedSize(request.policyNames.size()); + for (String policyName : request.policyNames) { + EnrichPolicy p = policies.get(policyName); + if (p != null) { + results.put(policyName, new EnrichPolicy(p.getType(), null, List.of(), p.getMatchField(), p.getEnrichFields())); + } } + new ChannelActionListener<>(channel).onResponse(new LookupResponse(results, policies.keySet())); } } - - public Set allPolicyNames() { - // TODO: remove this suggestion as it exposes policy names without the right permission - return policies().keySet(); - } - - private Map policies() { - if (clusterService == null || clusterService.state() == null) { - return Map.of(); - } - EnrichMetadata metadata = clusterService.state().metadata().custom(EnrichMetadata.TYPE); - return metadata == null ? 
Map.of() : metadata.getPolicies(); - } - } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index 7f5a6079cc6d7..85b30032c1070 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -163,10 +163,7 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) if (leftType == DataTypes.DATETIME) { return longs.apply(bc.source(), leftEval, rightEval); } - if (leftType == EsqlDataTypes.GEO_POINT) { - return geometries.apply(bc.source(), leftEval, rightEval, leftType); - } - if (leftType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(leftType)) { return geometries.apply(bc.source(), leftEval, rightEval, leftType); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index f8d9bfbc160a8..0264d2b42eb35 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -21,10 +21,12 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -177,10 +179,12 @@ private FunctionDefinition[][] functions() { new FunctionDefinition[] { def(ToBoolean.class, ToBoolean::new, "to_boolean", "to_bool"), def(ToCartesianPoint.class, ToCartesianPoint::new, "to_cartesianpoint"), + def(ToCartesianShape.class, ToCartesianShape::new, "to_cartesianshape"), def(ToDatetime.class, ToDatetime::new, "to_datetime", "to_dt"), def(ToDegrees.class, ToDegrees::new, "to_degrees"), def(ToDouble.class, ToDouble::new, "to_double", "to_dbl"), def(ToGeoPoint.class, ToGeoPoint::new, "to_geopoint"), + def(ToGeoShape.class, ToGeoShape::new, "to_geoshape"), def(ToIP.class, ToIP::new, "to_ip"), def(ToInteger.class, ToInteger::new, "to_integer", "to_int"), def(ToLong.class, ToLong::new, "to_long"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java new file mode 100644 index 0000000000000..64db9c6f015ed --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShape.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianShape extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(CARTESIAN_SHAPE, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToCartesianShapeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToCartesianShapeFromStringEvaluator.Factory::new) + ); + + @FunctionInfo(returnType = "cartesian_shape", description = "Converts an input value to a shape value.") + public ToCartesianShape(Source source, @Param(name = "v", type = { "cartesian_shape", "keyword", "text" }) Expression field) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return CARTESIAN_SHAPE; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToCartesianShape(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() 
{ + return NodeInfo.create(this, ToCartesianShape::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static BytesRef fromKeyword(BytesRef in) { + return CARTESIAN.wktToWkb(in.utf8ToString()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java new file mode 100644 index 0000000000000..075c5e753d76f --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShape.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; +import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.ql.type.DataTypes.TEXT; +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoShape extends AbstractConvertFunction { + + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(GEO_SHAPE, (fieldEval, source) -> fieldEval), + 
Map.entry(KEYWORD, ToGeoShapeFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToGeoShapeFromStringEvaluator.Factory::new) + ); + + @FunctionInfo(returnType = "geo_shape", description = "Converts an input value to a geo_shape value.") + public ToGeoShape(Source source, @Param(name = "v", type = { "geo_shape", "keyword", "text" }) Expression field) { + super(source, field); + } + + @Override + protected Map factories() { + return EVALUATORS; + } + + @Override + public DataType dataType() { + return GEO_SHAPE; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new ToGeoShape(source(), newChildren.get(0)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, ToGeoShape::new, field()); + } + + @ConvertEvaluator(extraName = "FromString", warnExceptions = { IllegalArgumentException.class }) + static BytesRef fromKeyword(BytesRef in) { + return GEO.wktToWkb(in.utf8ToString()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index e157f508f9466..688996dd1db00 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -23,7 +23,9 @@ import java.util.Map; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.CARTESIAN_SHAPE; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_SHAPE; import static org.elasticsearch.xpack.ql.type.DataTypes.BOOLEAN; import static org.elasticsearch.xpack.ql.type.DataTypes.DATETIME; import static 
org.elasticsearch.xpack.ql.type.DataTypes.DOUBLE; @@ -53,7 +55,9 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper Map.entry(VERSION, ToStringFromVersionEvaluator.Factory::new), Map.entry(UNSIGNED_LONG, ToStringFromUnsignedLongEvaluator.Factory::new), Map.entry(GEO_POINT, ToStringFromGeoPointEvaluator.Factory::new), - Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new) + Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new), + Map.entry(CARTESIAN_SHAPE, ToStringFromCartesianShapeEvaluator.Factory::new), + Map.entry(GEO_SHAPE, ToStringFromGeoShapeEvaluator.Factory::new) ); @FunctionInfo(returnType = "keyword", description = "Converts a field into a string.") @@ -64,9 +68,11 @@ public ToString( type = { "boolean", "cartesian_point", + "cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", @@ -148,4 +154,14 @@ static BytesRef fromGeoPoint(BytesRef wkb) { static BytesRef fromCartesianPoint(BytesRef wkb) { return new BytesRef(CARTESIAN.wkbToWkt(wkb)); } + + @ConvertEvaluator(extraName = "FromCartesianShape") + static BytesRef fromCartesianShape(BytesRef wkb) { + return new BytesRef(CARTESIAN.wkbToWkt(wkb)); + } + + @ConvertEvaluator(extraName = "FromGeoShape") + static BytesRef fromGeoShape(BytesRef wkb) { + return new BytesRef(GEO.wkbToWkt(wkb)); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 29350203a966d..4fa89e66982e4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -39,9 +39,11 @@ public MvCount( type = { "boolean", "cartesian_point", + 
"cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java index 2bc8314959995..0f6bd847d68ed 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirst.java @@ -37,9 +37,11 @@ public class MvFirst extends AbstractMultivalueFunction { returnType = { "boolean", "cartesian_point", + "cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", @@ -56,9 +58,11 @@ public MvFirst( type = { "boolean", "cartesian_point", + "cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java index aad003a649cca..2881854d17f6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLast.java @@ -37,9 +37,11 @@ public class MvLast extends AbstractMultivalueFunction { returnType = { "boolean", "cartesian_point", + "cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", @@ -56,9 +58,11 @@ public MvLast( type = { "boolean", "cartesian_point", + "cartesian_shape", "date", "double", "geo_point", + "geo_shape", "integer", "ip", "keyword", diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index b6dce816db218..f7f7ecd0118dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -12,10 +12,13 @@ import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -40,10 +43,12 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Least; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToBoolean; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToCartesianShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDatetime; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDegrees; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToDouble; import 
org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoPoint; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToGeoShape; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIP; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; @@ -167,7 +172,6 @@ import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.ql.index.EsIndex; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; @@ -333,6 +337,8 @@ public static List namedTypeEntries() { of(ESQL_UNARY_SCLR_CLS, ToDatetime.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDegrees.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToDouble.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToGeoShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), + of(ESQL_UNARY_SCLR_CLS, ToCartesianShape.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToGeoPoint.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToIP.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), of(ESQL_UNARY_SCLR_CLS, ToInteger.class, PlanNamedTypes::writeESQLUnaryScalar, PlanNamedTypes::readESQLUnaryScalar), @@ -482,15 +488,25 @@ static void writeEvalExec(PlanStreamOutput out, EvalExec evalExec) throws IOExce } static EnrichExec 
readEnrichExec(PlanStreamInput in) throws IOException { - return new EnrichExec( - in.readSource(), - in.readPhysicalPlanNode(), - in.readNamedExpression(), - in.readString(), - in.readString(), - readEsIndex(in), - readNamedExpressions(in) - ); + final Source source = in.readSource(); + final PhysicalPlan child = in.readPhysicalPlanNode(); + final NamedExpression matchField = in.readNamedExpression(); + final String policyName = in.readString(); + final String policyMatchField = in.readString(); + final Map concreteIndices; + final Enrich.Mode mode; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + mode = in.readEnum(Enrich.Mode.class); + concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); + } else { + mode = Enrich.Mode.ANY; + EsIndex esIndex = readEsIndex(in); + if (esIndex.concreteIndices().size() != 1) { + throw new IllegalStateException("expected a single concrete enrich index; got " + esIndex.concreteIndices()); + } + concreteIndices = Map.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + } + return new EnrichExec(source, child, mode, matchField, policyName, policyMatchField, concreteIndices, readNamedExpressions(in)); } static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOException { @@ -499,7 +515,17 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx out.writeNamedExpression(enrich.matchField()); out.writeString(enrich.policyName()); out.writeString(enrich.policyMatchField()); - writeEsIndex(out, enrich.enrichIndex()); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeEnum(enrich.mode()); + out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); + } else { + if (enrich.concreteIndices().keySet().equals(Set.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))) { + String concreteIndex = 
enrich.concreteIndices().get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + writeEsIndex(out, new EsIndex(concreteIndex, Map.of(), Set.of(concreteIndex))); + } else { + throw new IllegalStateException("expected a single concrete enrich index; got " + enrich.concreteIndices()); + } + } writeNamedExpressions(out, enrich.enrichFields()); } @@ -725,19 +751,29 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { } static Enrich readEnrich(PlanStreamInput in) throws IOException { - Enrich.Mode m = Enrich.Mode.ANY; + Enrich.Mode mode = Enrich.Mode.ANY; if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { - m = in.readEnum(Enrich.Mode.class); + mode = in.readEnum(Enrich.Mode.class); } - return new Enrich( - in.readSource(), - in.readLogicalPlanNode(), - m, - in.readExpression(), - in.readNamedExpression(), - new EnrichPolicyResolution(in.readString(), new EnrichPolicy(in), IndexResolution.valid(readEsIndex(in))), - readNamedExpressions(in) - ); + final Source source = in.readSource(); + final LogicalPlan child = in.readLogicalPlanNode(); + final Expression policyName = in.readExpression(); + final NamedExpression matchField = in.readNamedExpression(); + if (in.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + in.readString(); // discard the old policy name + } + final EnrichPolicy policy = new EnrichPolicy(in); + final Map concreteIndices; + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); + } else { + EsIndex esIndex = readEsIndex(in); + if (esIndex.concreteIndices().size() > 1) { + throw new IllegalStateException("expected a single enrich index; got " + esIndex); + } + concreteIndices = Map.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + } + return new Enrich(source, child, mode, policyName, matchField, policy, 
concreteIndices, readNamedExpressions(in)); } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { @@ -749,9 +785,22 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException out.writeLogicalPlanNode(enrich.child()); out.writeExpression(enrich.policyName()); out.writeNamedExpression(enrich.matchField()); - out.writeString(enrich.policy().policyName()); - enrich.policy().policy().writeTo(out); - writeEsIndex(out, enrich.policy().index().get()); + if (out.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name + } + enrich.policy().writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); + } else { + Map concreteIndices = enrich.concreteIndices(); + if (concreteIndices.keySet().equals(Set.of(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY))) { + String enrichIndex = concreteIndices.get(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + EsIndex esIndex = new EsIndex(enrichIndex, Map.of(), Set.of(enrichIndex)); + writeEsIndex(out, esIndex); + } else { + throw new IllegalStateException("expected a single enrich index; got " + concreteIndices); + } + } writeNamedExpressions(out, enrich.enrichFields()); } @@ -1180,6 +1229,8 @@ static void writeBinaryLogic(PlanStreamOutput out, BinaryLogic binaryLogic) thro entry(name(ToDatetime.class), ToDatetime::new), entry(name(ToDegrees.class), ToDegrees::new), entry(name(ToDouble.class), ToDouble::new), + entry(name(ToGeoShape.class), ToGeoShape::new), + entry(name(ToCartesianShape.class), ToCartesianShape::new), entry(name(ToGeoPoint.class), ToGeoPoint::new), entry(name(ToIP.class), ToIP::new), entry(name(ToInteger.class), ToInteger::new), diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 5e90f6e8e44c9..a6ce2db548504 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -330,6 +330,7 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { new Literal(source(ctx.policyName), policyName, DataTypes.KEYWORD), matchField, null, + Map.of(), keepClauses.isEmpty() ? List.of() : keepClauses ); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java index 37a0ff0fe5001..d5db90aa07325 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/Enrich.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.common.util.Maps; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.ql.capabilities.Resolvables; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; @@ -29,8 +29,9 @@ public class Enrich extends UnaryPlan { private final Expression policyName; private final NamedExpression matchField; - private final EnrichPolicyResolution policy; - private List enrichFields; + private final EnrichPolicy policy; + private final Map concreteIndices; // cluster -> enrich indices + private final List enrichFields; private List output; private final Mode mode; @@ -61,7 +62,8 @@ public Enrich( Mode mode, Expression policyName, NamedExpression 
matchField, - EnrichPolicyResolution policy, + EnrichPolicy policy, + Map concreteIndices, List enrichFields ) { super(source, child); @@ -69,6 +71,7 @@ public Enrich( this.policyName = policyName; this.matchField = matchField; this.policy = policy; + this.concreteIndices = concreteIndices; this.enrichFields = enrichFields; } @@ -80,10 +83,14 @@ public List enrichFields() { return enrichFields; } - public EnrichPolicyResolution policy() { + public EnrichPolicy policy() { return policy; } + public Map concreteIndices() { + return concreteIndices; + } + public Expression policyName() { return policyName; } @@ -102,12 +109,12 @@ public boolean expressionsResolved() { @Override public UnaryPlan replaceChild(LogicalPlan newChild) { - return new Enrich(source(), newChild, mode, policyName, matchField, policy, enrichFields); + return new Enrich(source(), newChild, mode, policyName, matchField, policy, concreteIndices, enrichFields); } @Override protected NodeInfo info() { - return NodeInfo.create(this, Enrich::new, child(), mode, policyName, matchField, policy, enrichFields); + return NodeInfo.create(this, Enrich::new, child(), mode, policyName, matchField, policy, concreteIndices, enrichFields); } @Override @@ -131,11 +138,12 @@ public boolean equals(Object o) { && Objects.equals(policyName, enrich.policyName) && Objects.equals(matchField, enrich.matchField) && Objects.equals(policy, enrich.policy) + && Objects.equals(concreteIndices, enrich.concreteIndices) && Objects.equals(enrichFields, enrich.enrichFields); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), mode, policyName, matchField, policy, enrichFields); + return Objects.hash(super.hashCode(), mode, policyName, matchField, policy, concreteIndices, enrichFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java index 
6f2b83ef0aa6f..0bfaa2db2be5d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EnrichExec.java @@ -6,23 +6,25 @@ */ package org.elasticsearch.xpack.esql.plan.physical; +import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.NamedExpression; -import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; import java.util.List; +import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputAttributes; public class EnrichExec extends UnaryExec implements EstimatesRowSize { + private final Enrich.Mode mode; private final NamedExpression matchField; private final String policyName; private final String policyMatchField; - private final EsIndex enrichIndex; + private final Map concreteIndices; // cluster -> enrich index private final List enrichFields; /** @@ -32,42 +34,58 @@ public class EnrichExec extends UnaryExec implements EstimatesRowSize { * @param matchField the match field in the source data * @param policyName the enrich policy name * @param policyMatchField the match field name in the policy - * @param enrichIndex the enricy policy index (the system index created by the policy execution, not the source index) + * @param concreteIndices a map from cluster to concrete enrich indices * @param enrichFields the enrich fields */ public EnrichExec( Source source, PhysicalPlan child, + Enrich.Mode mode, NamedExpression matchField, String policyName, String policyMatchField, - EsIndex enrichIndex, + Map concreteIndices, List enrichFields ) { super(source, child); + this.mode = mode; this.matchField = matchField; this.policyName = policyName; this.policyMatchField = policyMatchField; - 
this.enrichIndex = enrichIndex; + this.concreteIndices = concreteIndices; this.enrichFields = enrichFields; } @Override protected NodeInfo info() { - return NodeInfo.create(this, EnrichExec::new, child(), matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return NodeInfo.create( + this, + EnrichExec::new, + child(), + mode, + matchField, + policyName, + policyMatchField, + concreteIndices, + enrichFields + ); } @Override public EnrichExec replaceChild(PhysicalPlan newChild) { - return new EnrichExec(source(), newChild, matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return new EnrichExec(source(), newChild, mode, matchField, policyName, policyMatchField, concreteIndices, enrichFields); + } + + public Enrich.Mode mode() { + return mode; } public NamedExpression matchField() { return matchField; } - public EsIndex enrichIndex() { - return enrichIndex; + public Map concreteIndices() { + return concreteIndices; } public List enrichFields() { @@ -99,15 +117,16 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; if (super.equals(o) == false) return false; EnrichExec that = (EnrichExec) o; - return Objects.equals(matchField, that.matchField) + return mode.equals(that.mode) + && Objects.equals(matchField, that.matchField) && Objects.equals(policyName, that.policyName) && Objects.equals(policyMatchField, that.policyMatchField) - && Objects.equals(enrichIndex, that.enrichIndex) + && Objects.equals(concreteIndices, that.concreteIndices) && Objects.equals(enrichFields, that.enrichFields); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), matchField, policyName, policyMatchField, enrichIndex, enrichFields); + return Objects.hash(super.hashCode(), mode, matchField, policyName, policyMatchField, concreteIndices, enrichFields); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index d79becfc8a736..8c9ab8afe41f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -9,7 +9,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.Describable; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -93,7 +92,6 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.function.Function; import java.util.stream.Stream; @@ -110,6 +108,7 @@ public class LocalExecutionPlanner { private static final Logger logger = LogManager.getLogger(LocalExecutionPlanner.class); private final String sessionId; + private final String clusterAlias; private final CancellableTask parentTask; private final BigArrays bigArrays; private final BlockFactory blockFactory; @@ -122,6 +121,7 @@ public class LocalExecutionPlanner { public LocalExecutionPlanner( String sessionId, + String clusterAlias, CancellableTask parentTask, BigArrays bigArrays, BlockFactory blockFactory, @@ -133,6 +133,7 @@ public LocalExecutionPlanner( PhysicalOperationProviders physicalOperationProviders ) { this.sessionId = sessionId; + this.clusterAlias = clusterAlias; this.parentTask = parentTask; this.bigArrays = bigArrays; this.blockFactory = blockFactory; @@ -343,7 +344,7 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case "version" -> TopNEncoder.VERSION; case "boolean", "null", "byte", "short", "integer", "long", "double", "float", "half_float", "datetime", "date_period", "time_duration", "object", "nested", "scaled_float", "unsigned_long", "_doc" -> 
TopNEncoder.DEFAULT_SORTABLE; - case "geo_point", "cartesian_point" -> TopNEncoder.DEFAULT_UNSORTABLE; + case "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case "unsupported" -> TopNEncoder.UNSUPPORTED; default -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); @@ -456,11 +457,10 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon Layout.Builder layoutBuilder = source.layout.builder(); layoutBuilder.append(enrich.enrichFields()); Layout layout = layoutBuilder.build(); - Set indices = enrich.enrichIndex().concreteIndices(); - if (indices.size() != 1) { - throw new EsqlIllegalArgumentException("Resolved enrich should have one concrete index; got " + indices); + String enrichIndex = enrich.concreteIndices().get(clusterAlias); + if (enrichIndex == null) { + throw new EsqlIllegalArgumentException("No concrete enrich index for cluster [" + clusterAlias + "]"); } - String enrichIndex = Iterables.get(indices, 0); return source.with( new EnrichLookupOperator.Factory( sessionId, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 3eea84b0bd1f9..9410e9e97d078 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.planner; +import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -142,10 +143,11 @@ private PhysicalPlan map(UnaryPlan p, 
PhysicalPlan child) { return new EnrichExec( enrich.source(), child, + enrich.mode(), enrich.matchField(), - enrich.policy().policyName(), - enrich.policy().policy().getMatchField(), - enrich.policy().index().get(), + BytesRefs.toString(enrich.policyName().fold()), + enrich.policy().getMatchField(), + enrich.concreteIndices(), enrich.enrichFields() ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 1c20e55f289c3..933b0174aebc0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -197,11 +197,10 @@ static QueryBuilder detectFilter(PhysicalPlan plan, String fieldName, Predicate< /** * Map QL's {@link DataType} to the compute engine's {@link ElementType}, for sortable types only. - * This specifically excludes GEO_POINT and CARTESIAN_POINT, which are backed by DataType.LONG - * but are not themselves sortable (the long can be sorted, but the sort order is not usually useful). + * This specifically excludes spatial data types, which are not themselves sortable. 
*/ public static ElementType toSortableElementType(DataType dataType) { - if (dataType == EsqlDataTypes.GEO_POINT || dataType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(dataType)) { return ElementType.UNKNOWN; } return toElementType(dataType); @@ -238,11 +237,7 @@ public static ElementType toElementType(DataType dataType) { if (dataType == EsQueryExec.DOC_DATA_TYPE) { return ElementType.DOC; } - // TODO: Spatial types can be read from source into BYTES_REF, or read from doc-values into LONG - if (dataType == EsqlDataTypes.GEO_POINT) { - return ElementType.BYTES_REF; - } - if (dataType == EsqlDataTypes.CARTESIAN_POINT) { + if (EsqlDataTypes.isSpatial(dataType)) { return ElementType.BYTES_REF; } throw EsqlIllegalArgumentException.illegalDataType(dataType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index e781ed4a60c35..172fc0a3dc5cc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -155,7 +155,14 @@ public void execute( .groupIndices(SearchRequest.DEFAULT_INDICES_OPTIONS, PlannerUtils.planConcreteIndices(physicalPlan).toArray(String[]::new)); QueryPragmas queryPragmas = configuration.pragmas(); if (dataNodePlan == null || clusterToConcreteIndices.values().stream().allMatch(v -> v.indices().length == 0)) { - var computeContext = new ComputeContext(sessionId, List.of(), configuration, null, null); + var computeContext = new ComputeContext( + sessionId, + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, + List.of(), + configuration, + null, + null + ); runCompute( rootTask, computeContext, @@ -187,7 +194,7 @@ public void execute( // run compute on the coordinator runCompute( rootTask, - new ComputeContext(sessionId, List.of(), configuration, exchangeSource, 
null), + new ComputeContext(sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), coordinatorPlan, cancelOnFailure(rootTask, cancelled, refs.acquire()).map(driverProfiles -> { responseHeadersCollector.collect(); @@ -378,6 +385,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, try { LocalExecutionPlanner planner = new LocalExecutionPlanner( context.sessionId, + context.clusterAlias, task, bigArrays, blockFactory, @@ -570,13 +578,14 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T ); final ActionListener listener = new ChannelActionListener<>(channel); final EsqlConfiguration configuration = request.configuration(); + String clusterAlias = request.clusterAlias(); acquireSearchContexts( - request.clusterAlias(), + clusterAlias, request.shardIds(), configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { - var computeContext = new ComputeContext(sessionId, searchContexts, configuration, null, exchangeSink); + var computeContext = new ComputeContext(sessionId, clusterAlias, searchContexts, configuration, null, exchangeSink); runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { // don't return until all pages are fetched exchangeSink.addCompletionListener( @@ -669,7 +678,7 @@ void runComputeOnRemoteCluster( ); runCompute( parentTask, - new ComputeContext(localSessionId, List.of(), configuration, exchangeSource, exchangeSink), + new ComputeContext(localSessionId, clusterAlias, List.of(), configuration, exchangeSource, exchangeSink), coordinatorPlan, cancelOnFailure(parentTask, cancelled, refs.acquire()).map(driverProfiles -> { responseHeadersCollector.collect(); @@ -702,6 +711,7 @@ void runComputeOnRemoteCluster( record ComputeContext( String sessionId, + String clusterAlias, List searchContexts, EsqlConfiguration configuration, ExchangeSourceHandler exchangeSource, diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java index 284c78c6e0121..add6a0d24994c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java @@ -9,12 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; -import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; @@ -52,7 +51,6 @@ import org.elasticsearch.xpack.ql.type.InvalidMappedField; import org.elasticsearch.xpack.ql.util.Holder; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -150,32 +148,18 @@ public void analyzedPlan(LogicalPlan parsed, ActionListener listene private void preAnalyze(LogicalPlan parsed, BiFunction action, ActionListener listener) { PreAnalyzer.PreAnalysis preAnalysis = preAnalyzer.preAnalyze(parsed); - Set policyNames = new HashSet<>(preAnalysis.policyNames); - EnrichResolution resolution = new EnrichResolution(ConcurrentCollections.newConcurrentSet(), enrichPolicyResolver.allPolicyNames()); - - ActionListener groupedListener = listener.delegateFailureAndWrap((l, unused) -> { - assert resolution.resolvedPolicies().size() == policyNames.size() - : resolution.resolvedPolicies().size() + " != " + policyNames.size(); - + 
enrichPolicyResolver.resolvePolicy(preAnalysis.policyNames, listener.delegateFailureAndWrap((l, enrichResolution) -> { // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API - var matchFields = resolution.resolvedPolicies() - .stream() - .filter(p -> p.index().isValid()) // only if the policy by the specified name was found; later the Verifier will be - // triggered - .map(p -> p.policy().getMatchField()) + var matchFields = enrichResolution.resolvedEnrichPolicies() + .stream() // triggered + .map(EnrichPolicy::getMatchField) .collect(Collectors.toSet()); - preAnalyzeIndices( parsed, - l.delegateFailureAndWrap((ll, indexResolution) -> ll.onResponse(action.apply(indexResolution, resolution))), + l.delegateFailureAndWrap((ll, indexResolution) -> ll.onResponse(action.apply(indexResolution, enrichResolution))), matchFields ); - }); - try (RefCountingListener refs = new RefCountingListener(groupedListener)) { - for (String policyName : policyNames) { - enrichPolicyResolver.resolvePolicy(policyName, refs.acquire(resolution.resolvedPolicies()::add)); - } - } + })); } private void preAnalyzeIndices(LogicalPlan parsed, ActionListener listener, Set enrichPolicyMatchFields) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java index eae808abb5037..e8cc5a77291bb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/type/EsqlDataTypes.java @@ -47,6 +47,8 @@ public final class EsqlDataTypes { public static final DataType TIME_DURATION = new DataType("TIME_DURATION", null, Integer.BYTES + Long.BYTES, false, false, false); public static final DataType GEO_POINT = new DataType("geo_point", Double.BYTES * 2, false, false, false); public static final DataType 
CARTESIAN_POINT = new DataType("cartesian_point", Double.BYTES * 2, false, false, false); + public static final DataType GEO_SHAPE = new DataType("geo_shape", Integer.MAX_VALUE, false, false, false); + public static final DataType CARTESIAN_SHAPE = new DataType("cartesian_shape", Integer.MAX_VALUE, false, false, false); private static final Collection TYPES = Stream.of( BOOLEAN, @@ -72,7 +74,9 @@ public final class EsqlDataTypes { VERSION, UNSIGNED_LONG, GEO_POINT, - CARTESIAN_POINT + CARTESIAN_POINT, + CARTESIAN_SHAPE, + GEO_SHAPE ).sorted(Comparator.comparing(DataType::typeName)).toList(); private static final Map NAME_TO_TYPE = TYPES.stream().collect(toUnmodifiableMap(DataType::typeName, t -> t)); @@ -83,6 +87,7 @@ public final class EsqlDataTypes { Map map = TYPES.stream().filter(e -> e.esType() != null).collect(toMap(DataType::esType, t -> t)); // ES calls this 'point', but ESQL calls it 'cartesian_point' map.put("point", CARTESIAN_POINT); + map.put("shape", CARTESIAN_SHAPE); ES_TO_TYPE = Collections.unmodifiableMap(map); } @@ -167,7 +172,7 @@ public static boolean isNullOrTimeDuration(DataType t) { } public static boolean isSpatial(DataType t) { - return t == GEO_POINT || t == CARTESIAN_POINT; + return t == GEO_POINT || t == CARTESIAN_POINT || t == GEO_SHAPE || t == CARTESIAN_SHAPE; } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 39a7eee2e616d..0f6dbfb81f141 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.compute.data.BlockFactory; import 
org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -39,6 +40,7 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; @@ -48,7 +50,6 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; @@ -89,7 +90,6 @@ import java.net.URL; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -263,18 +263,18 @@ private static IndexResolution loadIndexResolution(String mappingName, String in } private static EnrichResolution loadEnrichPolicies() { - Set names = new HashSet<>(); - Set resolutions = new HashSet<>(); + EnrichResolution enrichResolution = new EnrichResolution(); for (CsvTestsDataLoader.EnrichConfig policyConfig : CsvTestsDataLoader.ENRICH_POLICIES) { EnrichPolicy policy = loadEnrichPolicyMapping(policyConfig.policyFileName()); CsvTestsDataLoader.TestsDataset sourceIndex = CSV_DATASET_MAP.get(policy.getIndices().get(0)); // this could practically work, but it's wrong: // EnrichPolicyResolution should contain the policy (system) index, not the source index - IndexResolution idxRes = loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName()); - 
names.add(policyConfig.policyName()); - resolutions.add(new EnrichPolicyResolution(policyConfig.policyName(), policy, idxRes)); + EsIndex esIndex = loadIndexResolution(sourceIndex.mappingFileName(), sourceIndex.indexName()).get(); + var concreteIndices = Map.of(RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, Iterables.get(esIndex.concreteIndices(), 0)); + enrichResolution.addResolvedPolicy(policyConfig.policyName(), policy, concreteIndices, esIndex.mapping()); + enrichResolution.addExistingPolicies(Set.of(policyConfig.policyName())); } - return new EnrichResolution(resolutions, names); + return enrichResolution; } private static EnrichPolicy loadEnrichPolicyMapping(String policyFileName) { @@ -338,6 +338,7 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { ); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( sessionId, + "", new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, blockFactory, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 24e356520ff3d..3b64870a15839 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ParserConstructor; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -56,10 +57,13 @@ import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.stream.Stream; +import static 
org.elasticsearch.common.xcontent.ChunkedToXContent.wrapAsToXContent; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.esql.action.EsqlQueryResponse.DROP_NULL_COLUMNS_OPTION; import static org.elasticsearch.xpack.esql.action.ResponseValueUtils.valuesToPage; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; @@ -152,6 +156,12 @@ private Page randomPage(List columns) { case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(randomIdentifier()).toBytesRef()); case "geo_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(GEO.asWkb(GeometryTestUtils.randomPoint())); case "cartesian_point" -> ((BytesRefBlock.Builder) builder).appendBytesRef(CARTESIAN.asWkb(ShapeTestUtils.randomPoint())); + case "geo_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean())) + ); + case "cartesian_shape" -> ((BytesRefBlock.Builder) builder).appendBytesRef( + CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())) + ); case "null" -> builder.appendNull(); case "_source" -> { try { @@ -323,28 +333,38 @@ public void testChunkResponseSizeRows() { public void testSimpleXContentColumnar() { try (EsqlQueryResponse response = simple(true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); } } + public void testSimpleXContentColumnarDropNulls() { + try (EsqlQueryResponse response = simple(true)) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo(""" + 
{"all_columns":[{"name":"foo","type":"integer"}],"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""") + ); + } + } + public void testSimpleXContentColumnarAsync() { try (EsqlQueryResponse response = simple(true, true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); } } public void testSimpleXContentRows() { try (EsqlQueryResponse response = simple(false)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); } } public void testSimpleXContentRowsAsync() { try (EsqlQueryResponse response = simple(false, true)) { - assertThat(Strings.toString(response), equalTo(""" + assertThat(Strings.toString(wrapAsToXContent(response)), equalTo(""" {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); } } @@ -366,6 +386,58 @@ public void testBasicXContentIdAndRunning() { } } + public void testNullColumnsXContentDropNulls() { + try ( + EsqlQueryResponse response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), blockFactory.newConstantNullBlock(2))), + null, + false, + null, + false, + false + ) + ) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo("{" + """ + "all_columns":[{"name":"foo","type":"integer"},{"name":"all_null","type":"integer"}],""" + """ + "columns":[{"name":"foo","type":"integer"}],""" + """ + "values":[[40],[80]]}""") + ); + } + } + + /** + * This is a paranoid test to make sure the {@link Block}s produced by {@link Block.Builder} + * that contain only {@code null} 
entries are properly recognized by the {@link EsqlQueryResponse#DROP_NULL_COLUMNS_OPTION}. + */ + public void testNullColumnsFromBuilderXContentDropNulls() { + try (IntBlock.Builder b = blockFactory.newIntBlockBuilder(2)) { + b.appendNull(); + b.appendNull(); + try ( + EsqlQueryResponse response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer"), new ColumnInfo("all_null", "integer")), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock(), b.build())), + null, + false, + null, + false, + false + ) + ) { + assertThat( + Strings.toString(wrapAsToXContent(response), new ToXContent.MapParams(Map.of(DROP_NULL_COLUMNS_OPTION, "true"))), + equalTo("{" + """ + "all_columns":[{"name":"foo","type":"integer"},{"name":"all_null","type":"integer"}],""" + """ + "columns":[{"name":"foo","type":"integer"}],""" + """ + "values":[[40],[80]]}""") + ); + } + } + } + private EsqlQueryResponse simple(boolean columnar) { return simple(columnar, false); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java index e357efe3fcc1f..605bfa7b05bff 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTestUtils.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; @@ -19,6 +18,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Set; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -88,28 +88,17 @@ public static IndexResolution analyzerExpandedDefaultMapping() { } public static EnrichResolution defaultEnrichResolution() { - EnrichPolicyResolution policyRes = loadEnrichPolicyResolution( - "languages", - "language_code", - "languages_idx", - "mapping-languages.json" - ); - return new EnrichResolution(Set.of(policyRes), Set.of("languages")); + return loadEnrichPolicyResolution("languages", "language_code", "languages_idx", "mapping-languages.json"); } - public static EnrichPolicyResolution loadEnrichPolicyResolution( - String policyName, - String matchField, - String idxName, - String mappingFile - ) { + public static EnrichResolution loadEnrichPolicyResolution(String policyName, String matchField, String idxName, String mappingFile) { IndexResolution mapping = loadMapping(mappingFile, idxName); List enrichFields = new ArrayList<>(mapping.get().mapping().keySet()); enrichFields.remove(matchField); - return new EnrichPolicyResolution( - policyName, - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(idxName), matchField, enrichFields), - mapping - ); + EnrichPolicy policy = new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of(idxName), matchField, enrichFields); + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy(policyName, policy, Map.of("", idxName), mapping.get().mapping()); + enrichResolution.addExistingPolicies(Set.of(policyName)); + return enrichResolution; } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 90e45a0a8b5a7..56ac25a3561af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -17,7 +17,6 @@ import 
org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.parser.ParsingException; @@ -1255,78 +1254,78 @@ public void testEmptyEsRelationOnCountStar() throws IOException { } public void testUnsupportedFieldsInStats() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | stats max(shape) + | stats max(unsupported) """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by shape + | stats max(int) by unsupported """, errorMsg); verifyUnsupported(""" from test - | stats max(int) by bool, shape + | stats max(int) by bool, unsupported """, errorMsg); } public void testUnsupportedFieldsInEval() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | eval x = shape + | eval x = unsupported """, errorMsg); verifyUnsupported(""" from test - | eval foo = 1, x = shape + | eval foo = 1, x = unsupported """, errorMsg); verifyUnsupported(""" from test - | eval x = 1 + shape + | eval x = 1 + unsupported """, errorMsg); } public void testUnsupportedFieldsInWhere() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | where shape == "[1.0, 1.0]" + | where unsupported == "[1.0, 1.0]" """, errorMsg); verifyUnsupported(""" from test - | where int > 2 and shape == "[1.0, 1.0]" + | where int > 2 
and unsupported == "[1.0, 1.0]" """, errorMsg); } public void testUnsupportedFieldsInSort() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | sort shape + | sort unsupported """, errorMsg); verifyUnsupported(""" from test - | sort int, shape + | sort int, unsupported """, errorMsg); } public void testUnsupportedFieldsInDissect() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | dissect shape \"%{foo}\" + | dissect unsupported \"%{foo}\" """, errorMsg); } public void testUnsupportedFieldsInGrok() { - var errorMsg = "Cannot use field [shape] with unsupported type [geo_shape]"; + var errorMsg = "Cannot use field [unsupported] with unsupported type [ip_range]"; verifyUnsupported(""" from test - | grok shape \"%{WORD:foo}\" + | grok unsupported \"%{WORD:foo}\" """, errorMsg); } @@ -1350,7 +1349,8 @@ public void testRegexOnInt() { public void testUnsupportedTypesWithToString() { // DATE_PERIOD and TIME_DURATION types have been added, but not really patched through the engine; i.e. supported. 
- final String supportedTypes = "boolean or cartesian_point or datetime or geo_point or ip or numeric or string or version"; + final String supportedTypes = + "boolean or cartesian_point or cartesian_shape or datetime or geo_point or geo_shape or ip or numeric or string or version"; verifyUnsupported( "row period = 1 year | eval to_string(period)", "line 1:28: argument of [to_string(period)] must be [" + supportedTypes + "], found value [period] type [date_period]" @@ -1359,7 +1359,10 @@ public void testUnsupportedTypesWithToString() { "row duration = 1 hour | eval to_string(duration)", "line 1:30: argument of [to_string(duration)] must be [" + supportedTypes + "], found value [duration] type [time_duration]" ); - verifyUnsupported("from test | eval to_string(shape)", "line 1:28: Cannot use field [shape] with unsupported type [geo_shape]"); + verifyUnsupported( + "from test | eval to_string(unsupported)", + "line 1:28: Cannot use field [unsupported] with unsupported type [ip_range]" + ); } public void testNonExistingEnrichPolicy() { @@ -1455,10 +1458,9 @@ public void testEnrichFieldsIncludeMatchField() { IndexResolution testIndex = loadMapping("mapping-basic.json", "test"); IndexResolution languageIndex = loadMapping("mapping-languages.json", "languages"); var enrichPolicy = new EnrichPolicy("match", null, List.of("unused"), "language_code", List.of("language_code", "language_name")); - EnrichResolution enrichResolution = new EnrichResolution( - Set.of(new EnrichPolicyResolution("languages", enrichPolicy, languageIndex)), - Set.of("languages") - ); + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy("languages", enrichPolicy, Map.of("", "languages"), languageIndex.get().mapping()); + enrichResolution.addExistingPolicies(Set.of("languages")); AnalyzerContext context = new AnalyzerContext(configuration(query), new EsqlFunctionRegistry(), testIndex, enrichResolution); Analyzer analyzer = new Analyzer(context, 
TEST_VERIFIER); LogicalPlan plan = analyze(query, analyzer); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index e3ff92000ab21..ff34823aa6d88 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -130,6 +130,8 @@ public static Literal randomLiteral(DataType type) { case "version" -> randomVersion().toBytesRef(); case "geo_point" -> GEO.asWkb(GeometryTestUtils.randomPoint()); case "cartesian_point" -> CARTESIAN.asWkb(ShapeTestUtils.randomPoint()); + case "geo_shape" -> GEO.asWkb(GeometryTestUtils.randomGeometry(randomBoolean())); + case "cartesian_shape" -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(randomBoolean())); case "null" -> null; case "_source" -> { try { @@ -909,7 +911,9 @@ private static String typeErrorMessage(boolean includeOrdinal, List expectedValue.apply((BytesRef) n), warnings); } + /** + * Generate positive test cases for a unary function operating on an {@link EsqlDataTypes#GEO_SHAPE}. + */ + public static void forUnaryGeoShape( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unary(suppliers, expectedEvaluatorToString, geoShapeCases(), expectedType, n -> expectedValue.apply((BytesRef) n), warnings); + } + + /** + * Generate positive test cases for a unary function operating on an {@link EsqlDataTypes#CARTESIAN_SHAPE}. 
+ */ + public static void forUnaryCartesianShape( + List suppliers, + String expectedEvaluatorToString, + DataType expectedType, + Function expectedValue, + List warnings + ) { + unary(suppliers, expectedEvaluatorToString, cartesianShapeCases(), expectedType, n -> expectedValue.apply((BytesRef) n), warnings); + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#IP}. */ @@ -922,6 +948,26 @@ private static List cartesianPointCases() { ); } + private static List geoShapeCases() { + return List.of( + new TypedDataSupplier( + "", + () -> GEO.asWkb(GeometryTestUtils.randomGeometry(ESTestCase.randomBoolean())), + EsqlDataTypes.GEO_SHAPE + ) + ); + } + + private static List cartesianShapeCases() { + return List.of( + new TypedDataSupplier( + "", + () -> CARTESIAN.asWkb(ShapeTestUtils.randomGeometry(ESTestCase.randomBoolean())), + EsqlDataTypes.CARTESIAN_SHAPE + ) + ); + } + public static List ipCases() { return List.of( new TypedDataSupplier( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java new file mode 100644 index 0000000000000..961aaacab0423 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianShapeTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianShapeTests extends AbstractFunctionTestCase { + public ToCartesianShapeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToCartesianShape" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryCartesianShape(suppliers, attribute, EsqlDataTypes.CARTESIAN_SHAPE, v -> v, List.of()); + // random strings that don't look like a geo point + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.CARTESIAN_SHAPE, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> CARTESIAN.wktToWkb(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are cartesian_shape representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(CARTESIAN.asWkt(GeometryTestUtils.randomGeometry(randomBoolean()))), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.CARTESIAN_SHAPE, + bytesRef -> CARTESIAN.wktToWkb(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToCartesianShape(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java new file mode 100644 index 0000000000000..dd9fcbd4951d7 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoShapeTests.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoShapeTests extends AbstractFunctionTestCase { + public ToGeoShapeTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToGeoShape" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryGeoShape(suppliers, attribute, EsqlDataTypes.GEO_SHAPE, v -> v, List.of()); + // random strings that don't look like a geo point + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.GEO_SHAPE, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> GEO.wktToWkb(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are geo_shape representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(GEO.asWkt(GeometryTestUtils.randomGeometry(randomBoolean()))), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.GEO_SHAPE, + bytesRef -> GEO.wktToWkb(((BytesRef) bytesRef).utf8ToString()), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToGeoShape(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java index 918956de08648..9d5eed2ca2ebe 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringTests.java @@ -101,6 +101,20 @@ public static Iterable parameters() { wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), List.of() ); + TestCaseSupplier.forUnaryGeoShape( + suppliers, + "ToStringFromGeoShapeEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + wkb -> new BytesRef(GEO.wkbToWkt(wkb)), + List.of() + ); + TestCaseSupplier.forUnaryCartesianShape( + suppliers, + "ToStringFromCartesianShapeEvaluator[field=" + read + "]", + DataTypes.KEYWORD, + wkb -> new BytesRef(CARTESIAN.wkbToWkt(wkb)), + List.of() + ); TestCaseSupplier.forUnaryIp( suppliers, "ToStringFromIPEvaluator[field=" + read + "]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java index d2e7e924fb95c..ecedb00e65597 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java @@ -11,7 +11,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; -import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -415,7 +415,7 @@ protected static void geoPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - points(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); + spatial(cases, name, evaluatorName, EsqlDataTypes.GEO_POINT, expectedDataType, GEO, GeometryTestUtils::randomPoint, matcher); } /** @@ -443,7 +443,7 @@ protected static void cartesianPoints( DataType expectedDataType, BiFunction, Matcher> matcher ) { - points( + spatial( cases, name, evaluatorName, @@ -456,20 +456,68 @@ protected static void cartesianPoints( } /** - * Build many test cases with either {@code geo_point} or {@code cartesian_point} values. + * Build many test cases with {@code geo_shape} values that are converted to another type. + * This assumes that the function consumes {@code geo_shape} values and produces another type. + * For example, mv_count() can consume geo_shapes and produce an integer count. 
*/ - protected static void points( + protected static void geoShape( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction, Matcher> matcher + ) { + spatial( + cases, + name, + evaluatorName, + EsqlDataTypes.GEO_SHAPE, + expectedDataType, + GEO, + () -> GeometryTestUtils.randomGeometry(randomBoolean()), + matcher + ); + } + + /** + * Build many test cases with {@code cartesian_shape} values that are converted to another type. + * This assumes that the function consumes {@code cartesian_shape} values and produces another type. + * For example, mv_count() can consume cartesian shapes and produce an integer count. + */ + protected static void cartesianShape( + List cases, + String name, + String evaluatorName, + DataType expectedDataType, + BiFunction, Matcher> matcher + ) { + spatial( + cases, + name, + evaluatorName, + EsqlDataTypes.CARTESIAN_SHAPE, + expectedDataType, + CARTESIAN, + () -> ShapeTestUtils.randomGeometry(randomBoolean()), + matcher + ); + } + + /** + * Build many test cases for spatial values + */ + protected static void spatial( List cases, String name, String evaluatorName, DataType dataType, DataType expectedDataType, SpatialCoordinateTypes spatial, - Supplier randomPoint, + Supplier randomGeometry, BiFunction, Matcher> matcher ) { cases.add(new TestCaseSupplier(name + "(" + dataType.typeName() + ")", List.of(dataType), () -> { - BytesRef wkb = spatial.asWkb(randomPoint.get()); + BytesRef wkb = spatial.asWkb(randomGeometry.get()); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(wkb), dataType, "field")), evaluatorName + "[field=Attribute[channel=0]]", @@ -479,7 +527,7 @@ protected static void points( })); for (Block.MvOrdering ordering : Block.MvOrdering.values()) { cases.add(new TestCaseSupplier(name + "(<" + dataType.typeName() + "s>) " + ordering, List.of(dataType), () -> { - List mvData = randomList(1, 100, () -> spatial.asWkb(randomPoint.get())); + List mvData = 
randomList(1, 100, () -> spatial.asWkb(randomGeometry.get())); putInOrder(mvData, ordering); return new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(mvData, dataType, "field")), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java index 1abbd62faa0bd..342baf405d0c3 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java @@ -39,6 +39,8 @@ public static Iterable parameters() { dateTimes(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); geoPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); cartesianPoints(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + geoShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + cartesianShape(cases, "mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java index 91c30b7c1f566..0f52efe20399e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java @@ -41,6 +41,8 @@ public static Iterable parameters() { dateTimes(cases, "mv_first", "MvFirst", DataTypes.DATETIME, (size, values) -> equalTo(values.findFirst().getAsLong())); geoPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.GEO_POINT, (size, values) -> equalTo(values.findFirst().get())); cartesianPoints(cases, "mv_first", "MvFirst", EsqlDataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); + geoShape(cases, "mv_first", "MvFirst", EsqlDataTypes.GEO_SHAPE, (size, values) -> equalTo(values.findFirst().get())); + cartesianShape(cases, "mv_first", "MvFirst", EsqlDataTypes.CARTESIAN_SHAPE, (size, values) -> equalTo(values.findFirst().get())); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java index 7577cbf7dd0a8..41abab22c72ef 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java @@ -47,6 +47,14 @@ public static Iterable parameters() { EsqlDataTypes.CARTESIAN_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get()) ); + geoShape(cases, "mv_last", "MvLast", EsqlDataTypes.GEO_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); + cartesianShape( + cases, + "mv_last", + "MvLast", + EsqlDataTypes.CARTESIAN_SHAPE, + (size, values) -> equalTo(values.reduce((f, s) -> s).get()) + ); return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java index 37ab820146bf4..71aa945594584 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/AbstractBinaryComparisonTestCase.java @@ -94,8 +94,8 @@ protected final void validateType(BinaryOperator op, DataType lhsTyp equalTo( String.format( Locale.ROOT, - "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime, version, geo_point or " - + "cartesian_point], found value [] type [%s]", + "first argument of [%s %s] must be [numeric, keyword, text, ip, datetime, version, geo_point, " + + "geo_shape, cartesian_point or cartesian_shape], found value [] type [%s]", lhsType.typeName(), rhsType.typeName(), lhsType.typeName() diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 5887d61c652bb..2716c4ff5195e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.Verifier; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import 
org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -133,25 +132,17 @@ public void init() { physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); - var enrichResolution = new EnrichResolution( - Set.of( - new EnrichPolicyResolution( - "foo", - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), - IndexResolution.valid( - new EsIndex( - "idx", - Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) - ) - ) - ) - ) - ), - Set.of("foo") + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy( + "foo", + new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), + Map.of("", "idx"), + Map.ofEntries( + Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) + ) ); - + enrichResolution.addExistingPolicies(Set.of("foo")); analyzer = new Analyzer( new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), new Verifier(new Metrics()) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 6320294d7ee54..2a4cf459a7c32 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -15,8 +15,6 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import 
org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; -import org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -82,7 +80,6 @@ import java.util.List; import java.util.Map; -import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -128,19 +125,14 @@ public static void init() { IndexResolution getIndexResult = IndexResolution.valid(test); logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); - EnrichPolicyResolution policy = AnalyzerTestUtils.loadEnrichPolicyResolution( + var enrichResolution = AnalyzerTestUtils.loadEnrichPolicyResolution( "languages_idx", "id", "languages_idx", "mapping-languages.json" ); analyzer = new Analyzer( - new AnalyzerContext( - EsqlTestUtils.TEST_CFG, - new EsqlFunctionRegistry(), - getIndexResult, - new EnrichResolution(Set.of(policy), Set.of("languages_idx", "something")) - ), + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), TEST_VERIFIER ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index c05e11d8d8a13..6a1bffe22cd7a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import 
org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -164,25 +163,17 @@ public void init() { physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); - var enrichResolution = new EnrichResolution( - Set.of( - new EnrichPolicyResolution( - "foo", - new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), - IndexResolution.valid( - new EsIndex( - "idx", - Map.ofEntries( - Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), - Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) - ) - ) - ) - ) - ), - Set.of("foo") + EnrichResolution enrichResolution = new EnrichResolution(); + enrichResolution.addResolvedPolicy( + "foo", + new EnrichPolicy(EnrichPolicy.MATCH_TYPE, null, List.of("idx"), "fld", List.of("a", "b")), + Map.of("", "idx"), + Map.ofEntries( + Map.entry("a", new EsField("a", DataTypes.INTEGER, Map.of(), true)), + Map.entry("b", new EsField("b", DataTypes.LONG, Map.of(), true)) + ) ); - + enrichResolution.addExistingPolicies(Set.of("foo")); analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 931c96a8cb8ed..fc23a773effdf 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -54,6 +54,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.function.Function; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; @@ -683,6 +684,7 @@ public void testEnrich() { new Literal(EMPTY, "countries", KEYWORD), new EmptyAttribute(EMPTY), null, + Map.of(), List.of() ), processingCommand("enrich countries") @@ -696,6 +698,7 @@ public void testEnrich() { new Literal(EMPTY, "index-policy", KEYWORD), new UnresolvedAttribute(EMPTY, "field_underscore"), null, + Map.of(), List.of() ), processingCommand("enrich index-policy ON field_underscore") @@ -710,6 +713,7 @@ public void testEnrich() { new Literal(EMPTY, "countries", KEYWORD), new UnresolvedAttribute(EMPTY, "country_code"), null, + Map.of(), List.of() ), processingCommand("enrich [ccq.mode :" + mode.name() + "] countries ON country_code") diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 3ac1453e6ad8f..c1ef69a0bf7ca 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -120,6 +120,7 @@ private Matcher maxPageSizeMatcher(boolean estimatedRowSizeIsHuge, int private LocalExecutionPlanner planner() throws IOException { return new LocalExecutionPlanner( "test", + "", null, BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance(), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index 1947249086568..37009c67e2c94 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; @@ -54,14 +55,23 @@ public void shutdownThreadPool() throws Exception { terminate(threadPool); } + @SuppressWarnings("unchecked") + EnrichPolicyResolver mockEnrichResolver() { + EnrichPolicyResolver enrichResolver = mock(EnrichPolicyResolver.class); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onResponse(new EnrichResolution()); + return null; + }).when(enrichResolver).resolvePolicy(any(), any()); + return enrichResolver; + } + public void testFailedMetric() { Client client = mock(Client.class); IndexResolver idxResolver = new IndexResolver(client, randomAlphaOfLength(10), EsqlDataTypeRegistry.INSTANCE, Set::of); var planExecutor = new PlanExecutor(idxResolver); String[] indices = new String[] { "test" }; - EnrichPolicyResolver enrichResolver = mock(EnrichPolicyResolver.class); - when(enrichResolver.allPolicyNames()).thenReturn(Set.of()); - + var enrichResolver = mockEnrichResolver(); // simulate a valid field_caps response so we can parse and correctly analyze de query FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class); when(fieldCapabilitiesResponse.getIndices()).thenReturn(indices); 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java index c4350c8ec74d7..43dec76c7de24 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/tree/EsqlNodeSubclassTests.java @@ -11,8 +11,6 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.dissect.DissectParser; -import org.elasticsearch.xpack.core.enrich.EnrichPolicy; -import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; @@ -34,7 +32,6 @@ import org.elasticsearch.xpack.ql.expression.UnresolvedNamedExpression; import org.elasticsearch.xpack.ql.expression.UnresolvedStar; import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.tree.Node; import org.elasticsearch.xpack.ql.tree.NodeSubclassTests; @@ -86,20 +83,6 @@ protected Object pluggableMakeArg(Class> toBuildClass, Class li return; } - var hits = searchResponse.getHits().getHits(); - delegate.onResponse(UnparsedModel.unparsedModelFromMap(createModelConfigMap(hits, modelId))); + delegate.onResponse(UnparsedModel.unparsedModelFromMap(createModelConfigMap(searchResponse.getHits(), modelId))); }); QueryBuilder queryBuilder = documentIdQuery(modelId); @@ -132,8 +132,7 @@ public void getModel(String modelId, ActionListener listener) { return; } - var hits = searchResponse.getHits().getHits(); - var modelConfigs = 
parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); assert modelConfigs.size() == 1; delegate.onResponse(modelConfigs.get(0)); }); @@ -162,8 +161,7 @@ public void getModelsByTaskType(TaskType taskType, ActionListener> listener) { return; } - var hits = searchResponse.getHits().getHits(); - var modelConfigs = parseHitsAsModels(hits).stream().map(UnparsedModel::unparsedModelFromMap).toList(); + var modelConfigs = parseHitsAsModels(searchResponse.getHits()).stream().map(UnparsedModel::unparsedModelFromMap).toList(); delegate.onResponse(modelConfigs); }); @@ -212,7 +209,7 @@ public void getAllModels(ActionListener> listener) { client.search(modelSearch, searchListener); } - private List parseHitsAsModels(SearchHit[] hits) { + private List parseHitsAsModels(SearchHits hits) { var modelConfigs = new ArrayList(); for (var hit : hits) { modelConfigs.add(new ModelConfigMap(hit.getSourceAsMap(), Map.of())); @@ -220,8 +217,8 @@ private List parseHitsAsModels(SearchHit[] hits) { return modelConfigs; } - private ModelConfigMap createModelConfigMap(SearchHit[] hits, String modelId) { - Map mappedHits = Arrays.stream(hits).collect(Collectors.toMap(hit -> { + private ModelConfigMap createModelConfigMap(SearchHits hits, String modelId) { + Map mappedHits = Arrays.stream(hits.getHits()).collect(Collectors.toMap(hit -> { if (hit.getIndex().startsWith(InferenceIndex.INDEX_NAME)) { return InferenceIndex.INDEX_NAME; } diff --git a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java index f75dd2926059a..f595153e4d6dd 100644 --- a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java +++ 
b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java @@ -222,18 +222,18 @@ protected void } private SearchHits prepareSearchHits() { - SearchHit hit1 = new SearchHit(0, "1"); + SearchHit hit1 = SearchHit.unpooled(0, "1"); hit1.score(1f); hit1.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - SearchHit hit2 = new SearchHit(0, "2"); + SearchHit hit2 = SearchHit.unpooled(0, "2"); hit2.score(1f); hit2.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - SearchHit hit3 = new SearchHit(0, "3*"); + SearchHit hit3 = SearchHit.unpooled(0, "3*"); hit3.score(1f); hit3.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - return new SearchHits(new SearchHit[] { hit1, hit2, hit3 }, new TotalHits(3L, TotalHits.Relation.EQUAL_TO), 1f); + return SearchHits.unpooled(new SearchHit[] { hit1, hit2, hit3 }, new TotalHits(3L, TotalHits.Relation.EQUAL_TO), 1f); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 5f2f7cfe491ca..f9213a7fcaeb8 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -10,12 +10,12 @@ import org.elasticsearch.action.admin.cluster.snapshots.features.ResetFeatureStateRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import 
org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; import org.elasticsearch.action.datastreams.CreateDataStreamAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterModule; @@ -308,7 +308,7 @@ protected PutFilterAction.Response putMlFilter(MlFilter filter) { protected static List fetchAllAuditMessages(String jobId) throws Exception { RefreshRequest refreshRequest = new RefreshRequest(NotificationsIndex.NOTIFICATIONS_INDEX); - RefreshResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); + BroadcastResponse refreshResponse = client().execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); assertThat(refreshResponse.getStatus().getStatus(), anyOf(equalTo(200), equalTo(201))); SearchRequest searchRequest = new SearchRequestBuilder(client()).setIndices(NotificationsIndex.NOTIFICATIONS_INDEX) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index a5c47524b6934..f28f6eff25b04 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -51,7 +51,6 @@ import static org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary; 
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; public class ModelSnapshotRetentionIT extends MlNativeAutodetectIntegTestCase { @@ -191,8 +190,7 @@ private List getAvailableModelStateDocIds() throws Exception { private List getDocIdsFromSearch(SearchRequest searchRequest) throws Exception { List docIds = new ArrayList<>(); assertResponse(client().execute(TransportSearchAction.TYPE, searchRequest), searchResponse -> { - assertThat(searchResponse.getHits(), notNullValue()); - for (SearchHit searchHit : searchResponse.getHits().getHits()) { + for (SearchHit searchHit : searchResponse.getHits()) { docIds.add(searchHit.getId()); } }); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java index 51f6243778517..ffe70d9747a56 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelProviderIT.java @@ -7,12 +7,12 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.license.License; @@ -109,7 +109,7 @@ public void testGetTrainedModelConfig() throws 
Exception { ); assertThat(exceptionHolder.get(), is(nullValue())); - AtomicReference refreshResponseAtomicReference = new AtomicReference<>(); + AtomicReference refreshResponseAtomicReference = new AtomicReference<>(); blockingCall( listener -> trainedModelProvider.refreshInferenceIndex(listener), refreshResponseAtomicReference, @@ -198,7 +198,7 @@ public void testGetTrainedModelConfigWithMultiDocDefinition() throws Exception { ); blockingCall( listener -> trainedModelProvider.refreshInferenceIndex(listener), - new AtomicReference(), + new AtomicReference(), new AtomicReference<>() ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 09cb8644dba4f..152d8fde8c86c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -32,6 +32,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -68,6 +70,7 @@ import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; +import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; @@ -753,6 +756,7 @@ public void loadExtensions(ExtensionLoader loader) { public static final int MAX_LOW_PRIORITY_MODELS_PER_NODE = 100; private static final Logger logger = 
LogManager.getLogger(MachineLearning.class); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MachineLearning.class); private final Settings settings; private final boolean enabled; @@ -919,6 +923,15 @@ public Collection createComponents(PluginServices services) { return List.of(new JobManagerHolder(), new MachineLearningExtensionHolder()); } + if ("darwin-x86_64".equals(Platforms.PLATFORM_NAME)) { + String msg = "The machine learning plugin will be permanently disabled on macOS x86_64 in new minor versions released " + + "from December 2024 onwards. To continue to use machine learning functionality on macOS please switch to an arm64 " + + "machine (Apple silicon). Alternatively, it will still be possible to run Elasticsearch with machine learning " + + "enabled in a Docker container on macOS x86_64."; + logger.warn(msg); + deprecationLogger.warn(DeprecationCategory.PLUGINS, "ml-darwin-x86_64", msg); + } + machineLearningExtension.get().configure(environment.settings()); this.mlUpgradeModeActionFilter.set(new MlUpgradeModeActionFilter(clusterService)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index 8ce41262a1e1d..bcf3c1f58cfa9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -115,7 +115,7 @@ protected void masterOperation( } // package-private for testing - static void cancelDownloadTask(Client client, String modelId, ActionListener listener, TimeValue timeout) { + static void cancelDownloadTask(Client client, String modelId, ActionListener listener, TimeValue timeout) { logger.debug(() -> format("[%s] Checking if download task exists and cancelling it", modelId)); OriginSettingClient mlClient = new OriginSettingClient(client, ML_ORIGIN); @@ -283,11 +283,11 @@ private static void executeTaskCancellation( Client client, String modelId, TaskInfo taskInfo, - ActionListener listener, + ActionListener listener, TimeValue timeout ) { if (taskInfo != null) { - ActionListener cancelListener = ActionListener.wrap(listener::onResponse, e -> { + ActionListener cancelListener = ActionListener.wrap(listener::onResponse, e -> { Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause instanceof ResourceNotFoundException) { logger.debug(() -> format("[%s] Task no longer exists when attempting to cancel it", modelId)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 4cfcf6509faa0..be8a098ed3986 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import 
org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; @@ -126,8 +127,7 @@ protected InputStream initScroll(long startTimestamp) throws IOException { logger.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - return processAndConsumeSearchHits(hits); + return processAndConsumeSearchHits(searchResponse.getHits()); } finally { searchResponse.decRef(); } @@ -184,9 +184,9 @@ private SearchRequestBuilder buildSearchRequest(long start) { /** * IMPORTANT: This is not an idempotent method. This method changes the input array by setting each element to null. */ - private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOException { + private InputStream processAndConsumeSearchHits(SearchHits hits) throws IOException { - if (hits == null || hits.length == 0) { + if (hits.getHits().length == 0) { hasNext = false; clearScroll(); return null; @@ -194,11 +194,10 @@ private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOExcep BytesStreamOutput outputStream = new BytesStreamOutput(); - SearchHit lastHit = hits[hits.length - 1]; + SearchHit lastHit = hits.getAt(hits.getHits().length - 1); lastTimestamp = context.extractedFields.timeFieldValue(lastHit); try (SearchHitToJsonProcessor hitProcessor = new SearchHitToJsonProcessor(context.extractedFields, outputStream)) { - for (int i = 0; i < hits.length; i++) { - SearchHit hit = hits[i]; + for (SearchHit hit : hits) { if (isCancelled) { Long timestamp = context.extractedFields.timeFieldValue(hit); if (timestamp != null) { @@ -212,9 +211,6 @@ private InputStream processAndConsumeSearchHits(SearchHit hits[]) throws IOExcep } } hitProcessor.process(hit); - // hack to remove the reference from object. This object can be big and consume alot of memory. 
- // We are removing it as soon as we process it. - hits[i] = null; } } return outputStream.bytes().streamInput(); @@ -237,8 +233,7 @@ private InputStream continueScroll() throws IOException { logger.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); scrollId = searchResponse.getScrollId(); - SearchHit hits[] = searchResponse.getHits().getHits(); - return processAndConsumeSearchHits(hits); + return processAndConsumeSearchHits(searchResponse.getHits()); } finally { if (searchResponse != null) { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 4119b23747fcb..c890ab599c380 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; @@ -154,11 +155,11 @@ public void preview(ActionListener> listener) { return; } - final SearchHit[] hits = searchResponse.getHits().getHits(); - List rows = new ArrayList<>(hits.length); - for (SearchHit hit : hits) { - String[] extractedValues = extractValues(hit); - rows.add(extractedValues == null ? 
new Row(null, hit, true) : new Row(extractedValues, hit, false)); + List rows = new ArrayList<>(searchResponse.getHits().getHits().length); + for (SearchHit hit : searchResponse.getHits().getHits()) { + var unpooled = hit.asUnpooled(); + String[] extractedValues = extractValues(unpooled); + rows.add(extractedValues == null ? new Row(null, unpooled, true) : new Row(extractedValues, unpooled, false)); } delegate.onResponse(rows); }) @@ -251,8 +252,8 @@ private List processSearchResponse(SearchResponse searchResponse) { return null; } - SearchHit[] hits = searchResponse.getHits().getHits(); - List rows = new ArrayList<>(hits.length); + SearchHits hits = searchResponse.getHits(); + List rows = new ArrayList<>(hits.getHits().length); for (SearchHit hit : hits) { if (isCancelled) { hasNext = false; @@ -317,12 +318,13 @@ private String[] extractProcessedValue(ProcessedField processedField, SearchHit } private Row createRow(SearchHit hit) { - String[] extractedValues = extractValues(hit); + var unpooled = hit.asUnpooled(); + String[] extractedValues = extractValues(unpooled); if (extractedValues == null) { - return new Row(null, hit, true); + return new Row(null, unpooled, true); } boolean isTraining = trainTestSplitter.get().isTraining(extractedValues); - Row row = new Row(extractedValues, hit, isTraining); + Row row = new Row(extractedValues, unpooled, isTraining); LOGGER.trace( () -> format( "[%s] Extracted row: sort key = [%s], is_training = [%s], values = %s", diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java index bd37706622187..9e2db58befdbf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/TestDocsIterator.java @@ -61,7 +61,7 @@ protected FieldSortBuilder 
sortField() { @Override protected SearchHit map(SearchHit hit) { - return hit; + return hit.asUnpooled(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java index 7eef0e526eac3..2012ca87578b0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersister.java @@ -12,7 +12,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.license.License; @@ -157,7 +157,7 @@ private CountDownLatch storeTrainedModelDoc(TrainedModelDefinitionDoc trainedMod CountDownLatch latch = new CountDownLatch(1); // Latch is attached to this action as it is the last one to execute. - ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { + ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { if (refreshed != null) { LOGGER.debug(() -> "[" + analytics.getId() + "] refreshed inference index after model store"); } @@ -210,7 +210,7 @@ private CountDownLatch storeTrainedModelMetadata(TrainedModelMetadata trainedMod CountDownLatch latch = new CountDownLatch(1); // Latch is attached to this action as it is the last one to execute. 
- ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { + ActionListener refreshListener = new LatchedActionListener<>(ActionListener.wrap(refreshed -> { if (refreshed != null) { LOGGER.debug(() -> "[" + analytics.getId() + "] refreshed inference index after model metadata store"); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java index 1b6818a8727f3..0c693ff2d34f4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AbstractDataFrameAnalyticsStep.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.tasks.TaskId; @@ -76,7 +76,7 @@ public final void execute(ActionListener listener) { protected abstract void doExecute(ActionListener listener); - protected void refreshDestAsync(ActionListener refreshListener) { + protected void refreshDestAsync(ActionListener refreshListener) { ParentTaskAssigningClient parentTaskClient = parentTaskClient(); executeWithHeadersAsync( config.getHeaders(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java index 8adf5b3f0621a..9e56387ed773e 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/AnalysisStep.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.dataframe.steps; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; @@ -63,7 +63,7 @@ protected void doExecute(ActionListener listener) { listener::onFailure ); - ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { + ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { // TODO This could fail with errors. In that case we get stuck with the copied index. // We could delete the index in case of failure or we could try building the factory before reindexing // to catch the error early on. 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java index 7b27090dc302d..dbf1f3e7be3d9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/FinalStep.java @@ -13,10 +13,10 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.ToXContent; @@ -60,7 +60,7 @@ public Name name() { @Override protected void doExecute(ActionListener listener) { - ActionListener refreshListener = ActionListener.wrap( + ActionListener refreshListener = ActionListener.wrap( refreshResponse -> listener.onResponse(new StepResponse(false)), listener::onFailure ); @@ -89,7 +89,7 @@ private void indexDataCounts(ActionListener listener) { } } - private void refreshIndices(ActionListener listener) { + private void refreshIndices(ActionListener listener) { RefreshRequest refreshRequest = new RefreshRequest( AnomalyDetectorsIndex.jobStateIndexPattern(), MlStatsIndex.indexPattern(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 65ac2b678d93b..ad005e6d9ae6c 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -11,9 +11,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; @@ -85,7 +85,7 @@ protected void doExecute(ActionListener listener) { } }, listener::onFailure); - ActionListener refreshDestListener = ActionListener.wrap( + ActionListener refreshDestListener = ActionListener.wrap( refreshResponse -> searchIfTestDocsExist(testDocsExistListener), listener::onFailure ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java index 1ca78df1fad3d..0ccdd1eb64601 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/ReindexingStep.java @@ -12,8 +12,8 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import 
org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; @@ -278,7 +278,7 @@ public void cancel(String reason, TimeValue timeout) { // We need to cancel the reindexing task within context with ML origin as we started the task // from the same context - CancelTasksResponse cancelReindexResponse = cancelTaskWithinMlOriginContext(cancelReindex); + ListTasksResponse cancelReindexResponse = cancelTaskWithinMlOriginContext(cancelReindex); Throwable firstError = null; if (cancelReindexResponse.getNodeFailures().isEmpty() == false) { @@ -296,7 +296,7 @@ public void cancel(String reason, TimeValue timeout) { } } - private CancelTasksResponse cancelTaskWithinMlOriginContext(CancelTasksRequest cancelTasksRequest) { + private ListTasksResponse cancelTaskWithinMlOriginContext(CancelTasksRequest cancelTasksRequest) { final ThreadContext threadContext = client.threadPool().getThreadContext(); try (ThreadContext.StoredContext ignore = threadContext.stashWithOrigin(ML_ORIGIN)) { return client.admin().cluster().cancelTasks(cancelTasksRequest).actionGet(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java index 068462bcdfca2..4e3fa3addaf30 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorer.java @@ -55,6 +55,15 @@ public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext r if (ltrRescoreContext.regressionModelDefinition == null) { throw new IllegalStateException("local model reference is null, missing rewriteAndFetch before rescore phase?"); } + + if (rescoreContext.getWindowSize() < 
topDocs.scoreDocs.length) { + throw new IllegalArgumentException( + "Rescore window is too small and should be at least the value of from + size but was [" + + rescoreContext.getWindowSize() + + "]" + ); + } + LocalModel definition = ltrRescoreContext.regressionModelDefinition; // First take top slice of incoming docs, to be rescored: diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java index 11676cc4a1599..a5a7859a7f938 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilder.java @@ -32,10 +32,10 @@ public class LearningToRankRescorerBuilder extends RescorerBuilder { - public static final String NAME = "learning_to_rank"; - private static final ParseField MODEL_FIELD = new ParseField("model_id"); - private static final ParseField PARAMS_FIELD = new ParseField("params"); - private static final ObjectParser PARSER = new ObjectParser<>(NAME, false, Builder::new); + public static final ParseField NAME = new ParseField("learning_to_rank"); + public static final ParseField MODEL_FIELD = new ParseField("model_id"); + public static final ParseField PARAMS_FIELD = new ParseField("params"); + private static final ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), false, Builder::new); static { PARSER.declareString(Builder::setModelId, MODEL_FIELD); @@ -251,7 +251,7 @@ protected LearningToRankRescorerContext innerBuildContext(int windowSize, Search @Override public String getWriteableName() { - return NAME; + return NAME.getPreferredName(); } @Override @@ -260,6 +260,11 @@ public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); } + @Override + protected boolean isWindowSizeRequired() { 
+ return true; + } + @Override protected void doWriteTo(StreamOutput out) throws IOException { assert localModel == null || rescoreOccurred : "Unnecessarily populated local model object"; @@ -270,7 +275,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); + builder.startObject(NAME.getPreferredName()); builder.field(MODEL_FIELD.getPreferredName(), modelId); if (this.params != null) { builder.field(PARAMS_FIELD.getPreferredName(), this.params); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java index 11b699df66b83..0bfc64c9b0027 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/Vocabulary.java @@ -69,7 +69,7 @@ public Vocabulary(StreamInput in) throws IOException { } else { merges = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { scores = in.readCollectionAsList(StreamInput::readDouble); } else { scores = List.of(); @@ -95,7 +95,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_2_0)) { out.writeStringCollection(merges); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeCollection(scores, StreamOutput::writeDouble); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index 
d267966a1d795..b502e0d6db341 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -30,6 +29,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Numbers; @@ -52,6 +52,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.Sum; @@ -419,7 +420,7 @@ public void getTrainedModelMetadata( })); } - public void refreshInferenceIndex(ActionListener listener) { + public void refreshInferenceIndex(ActionListener listener) { executeAsyncWithOrigin( client, ML_ORIGIN, @@ -663,7 +664,7 @@ public void getTrainedModel( ActionListener trainedModelSearchHandler = ActionListener.wrap(modelSearchResponse -> { TrainedModelConfig.Builder builder; try { - builder = handleHits(modelSearchResponse.getHits().getHits(), modelId, 
this::parseModelConfigLenientlyFromSource).get(0); + builder = handleHits(modelSearchResponse.getHits(), modelId, this::parseModelConfigLenientlyFromSource).get(0); } catch (ResourceNotFoundException ex) { getTrainedModelListener.onFailure( new ResourceNotFoundException(Messages.getMessage(Messages.INFERENCE_NOT_FOUND, modelId)) @@ -701,7 +702,7 @@ public void getTrainedModel( ActionListener.wrap(definitionSearchResponse -> { try { List docs = handleHits( - definitionSearchResponse.getHits().getHits(), + definitionSearchResponse.getHits(), modelId, (bytes, resourceId) -> ChunkedTrainedModelRestorer.parseModelDefinitionDocLenientlyFromSource( bytes, @@ -1268,15 +1269,15 @@ private static Set matchedResourceIds(String[] tokens) { } private static List handleHits( - SearchHit[] hits, + SearchHits hits, String resourceId, CheckedBiFunction parseLeniently ) throws Exception { - if (hits.length == 0) { + if (hits.getHits().length == 0) { throw new ResourceNotFoundException(resourceId); } - List results = new ArrayList<>(hits.length); - String initialIndex = hits[0].getIndex(); + List results = new ArrayList<>(hits.getHits().length); + String initialIndex = hits.getAt(0).getIndex(); for (SearchHit hit : hits) { // We don't want to spread across multiple backing indices if (hit.getIndex().equals(initialIndex)) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index ac16948e32ed6..577bbe3dac6ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import 
org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; @@ -23,6 +22,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportMultiSearchAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -451,7 +451,7 @@ private void deleteResultsByQuery( ) { assert indices.length > 0; - ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { + ActionListener refreshListener = ActionListener.wrap(refreshResponse -> { logger.info("[{}] running delete by query on [{}]", jobId, String.join(", ", indices)); ConstantScoreQueryBuilder query = new ConstantScoreQueryBuilder(new TermQueryBuilder(Job.ID.getPreferredName(), jobId)); DeleteByQueryRequest request = new DeleteByQueryRequest(indices).setQuery(query) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java index 92ceb536cfd43..29a8a35ff0fdd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteForecastActionTests.java @@ -86,7 +86,7 @@ private static SearchHit createForecastStatsHit(ForecastRequestStats.ForecastReq ForecastRequestStats.STATUS.getPreferredName(), new DocumentField(ForecastRequestStats.STATUS.getPreferredName(), 
Collections.singletonList(status.toString())) ); - SearchHit hit = new SearchHit(0, ""); + SearchHit hit = SearchHit.unpooled(0, ""); hit.addDocumentFields(documentFields, Map.of()); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java index 4f1a99f634a0a..feb35195e3e38 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelActionTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.action.support.PlainActionFuture; @@ -57,7 +56,7 @@ public void tearDownThreadPool() { public void testCancelDownloadTaskCallsListenerWithNullWhenNoTasksExist() { var client = mockClientWithTasksResponse(Collections.emptyList(), threadPool); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); cancelDownloadTask(client, "modelId", listener, TIMEOUT); @@ -70,13 +69,13 @@ public void testCancelDownloadTaskCallsOnFailureWithErrorWhenCancellingFailsWith doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new Exception("cancel error")); return Void.TYPE; 
}).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); cancelDownloadTask(client, "modelId", listener, TIMEOUT); @@ -91,13 +90,13 @@ public void testCancelDownloadTaskCallsOnResponseNullWhenTheTaskNoLongerExistsWh doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onFailure(new ResourceNotFoundException("task no longer there")); return Void.TYPE; }).when(client).execute(same(CancelTasksAction.INSTANCE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); cancelDownloadTask(client, "modelId", listener, TIMEOUT); @@ -115,7 +114,7 @@ public void testCancelDownloadTasksCallsGetsUnableToRetrieveTaskInfoError() { return Void.TYPE; }).when(client).execute(same(TransportListTasksAction.TYPE), any(), any()); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); cancelDownloadTask(client, "modelId", listener, TIMEOUT); @@ -127,10 +126,10 @@ public void testCancelDownloadTasksCallsGetsUnableToRetrieveTaskInfoError() { public void testCancelDownloadTaskCallsOnResponseWithTheCancelResponseWhenATaskExists() { var client = mockClientWithTasksResponse(getTaskInfoListOfOne(), threadPool); - var cancelResponse = mock(CancelTasksResponse.class); + var cancelResponse = mock(ListTasksResponse.class); mockCancelTasksResponse(client, cancelResponse); - var listener = new PlainActionFuture(); + var listener = new PlainActionFuture(); cancelDownloadTask(client, "modelId", listener, TIMEOUT); @@ -142,12 +141,12 @@ private static void mockCancelTask(Client client) { when(cluster.prepareCancelTasks()).thenReturn(new CancelTasksRequestBuilder(client)); } - private static void mockCancelTasksResponse(Client client, CancelTasksResponse 
response) { + private static void mockCancelTasksResponse(Client client, ListTasksResponse response) { mockCancelTask(client); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[2]; listener.onResponse(response); return Void.TYPE; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index 12ce45a186d62..4bbaafa9db0cd 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -555,7 +555,8 @@ private SearchResponse createSearchResponse(long totalHits, long earliestTime, l SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.status()).thenReturn(RestStatus.OK); SearchHit[] hits = new SearchHit[(int) totalHits]; - SearchHits searchHits = new SearchHits(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1); + Arrays.fill(hits, SearchHit.unpooled(1)); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1); when(searchResponse.getHits()).thenReturn(searchHits); List aggs = new ArrayList<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index bf7aa465ee604..2dd17e434cccb 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -547,7 +547,8 @@ private SearchResponse createSearchResponse(List timestamps, List hits.add(hit); } SearchHits searchHits = new SearchHits(hits.toArray(SearchHits.EMPTY), new TotalHits(hits.size(), TotalHits.Relation.EQUAL_TO), 1); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); when(searchResponse.getTook()).thenReturn(TimeValue.timeValueMillis(randomNonNegativeLong())); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java index 63afc4ef6659c..8d8cded819e23 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsTaskTests.java @@ -243,10 +243,12 @@ public void testPersistProgress_ProgressDocumentCreated() throws IOException { } public void testPersistProgress_ProgressDocumentUpdated() throws IOException { - testPersistProgress( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistProgress(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } public void testSetFailed() throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 7bc3d507ecf22..993e00bd4adf4 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -654,7 +654,8 @@ private SearchResponse createSearchResponse(List field1Values, List buildSearchHits(List> vals) { - return vals.stream() - .map(InferenceRunnerTests::fromMap) - .map(reference -> SearchHit.createFromMap(Collections.singletonMap("_source", reference))) - .collect(Collectors.toCollection(ArrayDeque::new)); + return vals.stream().map(InferenceRunnerTests::fromMap).map(reference -> { + var pooled = SearchHit.createFromMap(Collections.singletonMap("_source", reference)); + try { + return pooled.asUnpooled(); + } finally { + pooled.decRef(); + } + }).collect(Collectors.toCollection(ArrayDeque::new)); } private static BytesReference fromMap(Map map) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java index d9176b74d2d3f..c308f95d483a5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/ChunkedTrainedModelPersisterTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.ml.dataframe.process; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.license.License; import org.elasticsearch.test.ESTestCase; @@ -102,7 +102,7 @@ public void testPersistAllDocs() { }).when(trainedModelProvider).storeTrainedModelMetadata(any(TrainedModelMetadata.class), 
any(ActionListener.class)); doAnswer(invocationOnMock -> { - ActionListener storeListener = (ActionListener) invocationOnMock.getArguments()[0]; + ActionListener storeListener = (ActionListener) invocationOnMock.getArguments()[0]; storeListener.onResponse(null); return null; }).when(trainedModelProvider).refreshInferenceIndex(any(ActionListener.class)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index 99dfd9e919a6a..3a95a3bb65f10 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -309,7 +309,7 @@ private void givenDataFrameBatches(List> batche } private static SearchHit newHit(String json) { - SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10)); + SearchHit hit = SearchHit.unpooled(randomInt(), randomAlphaOfLength(10)); hit.sourceRef(new BytesArray(json)); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java index 79044a465442b..f52d05fc3220d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerBuilderSerializationTests.java @@ -9,14 +9,19 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.rescore.RescorerBuilder; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfig; @@ -25,48 +30,36 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; -import static org.elasticsearch.search.rank.RankBuilder.WINDOW_SIZE_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.LearningToRankConfigTests.randomLearningToRankConfig; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class LearningToRankRescorerBuilderSerializationTests extends AbstractBWCSerializationTestCase { private static LearningToRankService learningToRankService = mock(LearningToRankService.class); - @Override - protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { - String fieldName = null; - LearningToRankRescorerBuilder rescorer = null; - Integer windowSize = null; - XContentParser.Token token = parser.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token.isValue()) { - if 
(WINDOW_SIZE_FIELD.match(fieldName, parser.getDeprecationHandler())) { - windowSize = parser.intValue(); - } else { - throw new ParsingException(parser.getTokenLocation(), "rescore doesn't support [" + fieldName + "]"); + public void testRequiredWindowSize() throws IOException { + for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) { + LearningToRankRescorerBuilder testInstance = createTestInstance(); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.startObject(); + testInstance.doXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + try (XContentParser parser = JsonXContent.jsonXContent.createParser(parserConfig(), Strings.toString(builder))) { + ParsingException e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser, (r) -> {})); + assertThat(e.getMessage(), equalTo("window_size is required for rescorer of type [learning_to_rank]")); } - } else if (token == XContentParser.Token.START_OBJECT) { - rescorer = LearningToRankRescorerBuilder.fromXContent(parser, learningToRankService); - } else { - throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "] after [" + fieldName + "]"); } } - if (rescorer == null) { - throw new ParsingException(parser.getTokenLocation(), "missing rescore type"); - } - if (windowSize != null) { - rescorer.windowSize(windowSize); - } - return rescorer; + } + + @Override + protected LearningToRankRescorerBuilder doParseInstance(XContentParser parser) throws IOException { + return (LearningToRankRescorerBuilder) RescorerBuilder.parseFromXContent(parser, (r) -> {}); } @Override @@ -85,76 +78,49 @@ protected LearningToRankRescorerBuilder createTestInstance() { learningToRankService ); - if (randomBoolean()) { - builder.windowSize(randomIntBetween(1, 10000)); - } + builder.windowSize(randomIntBetween(1, 10000)); return builder; } @Override protected LearningToRankRescorerBuilder createXContextTestInstance(XContentType xContentType) { - 
return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learningToRankService); + return new LearningToRankRescorerBuilder(randomAlphaOfLength(10), randomBoolean() ? randomParams() : null, learningToRankService) + .windowSize(randomIntBetween(1, 10000)); } @Override protected LearningToRankRescorerBuilder mutateInstance(LearningToRankRescorerBuilder instance) throws IOException { - int i = randomInt(4); return switch (i) { - case 0 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), - instance.params(), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; - } + case 0 -> new LearningToRankRescorerBuilder( + randomValueOtherThan(instance.modelId(), () -> randomAlphaOfLength(10)), + instance.params(), + learningToRankService + ).windowSize(instance.windowSize()); case 1 -> new LearningToRankRescorerBuilder(instance.modelId(), instance.params(), learningToRankService).windowSize( randomValueOtherThan(instance.windowSize(), () -> randomIntBetween(1, 10000)) ); - case 2 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - instance.modelId(), - randomValueOtherThan(instance.params(), () -> (randomBoolean() ? randomParams() : null)), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize() + 1); - } - yield builder; - } + case 2 -> new LearningToRankRescorerBuilder( + instance.modelId(), + randomValueOtherThan(instance.params(), () -> (randomBoolean() ? 
randomParams() : null)), + learningToRankService + ).windowSize(instance.windowSize()); case 3 -> { LearningToRankConfig learningToRankConfig = randomValueOtherThan( instance.learningToRankConfig(), () -> randomLearningToRankConfig() ); - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - instance.modelId(), - learningToRankConfig, - null, - learningToRankService + yield new LearningToRankRescorerBuilder(instance.modelId(), learningToRankConfig, null, learningToRankService).windowSize( + instance.windowSize() ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; - } - case 4 -> { - LearningToRankRescorerBuilder builder = new LearningToRankRescorerBuilder( - mock(LocalModel.class), - instance.learningToRankConfig(), - instance.params(), - learningToRankService - ); - if (instance.windowSize() != null) { - builder.windowSize(instance.windowSize()); - } - yield builder; } + case 4 -> new LearningToRankRescorerBuilder( + mock(LocalModel.class), + instance.learningToRankConfig(), + instance.params(), + learningToRankService + ).windowSize(instance.windowSize()); default -> throw new AssertionError("Unexpected random test case"); }; } @@ -169,31 +135,38 @@ protected NamedXContentRegistry xContentRegistry() { List namedXContent = new ArrayList<>(); namedXContent.addAll(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); - namedXContent.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents()); + namedXContent.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedXContents()); + namedXContent.add( + new NamedXContentRegistry.Entry( + RescorerBuilder.class, + LearningToRankRescorerBuilder.NAME, + (p, c) -> LearningToRankRescorerBuilder.fromXContent(p, learningToRankService) + ) + ); return new NamedXContentRegistry(namedXContent); } + @Override + protected 
NamedWriteableRegistry getNamedWriteableRegistry() { + return writableRegistry(); + } + @Override protected NamedWriteableRegistry writableRegistry() { List namedWriteables = new ArrayList<>(new MlInferenceNamedXContentProvider().getNamedWriteables()); namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); - namedWriteables.addAll(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables()); + namedWriteables.addAll(new SearchModule(Settings.EMPTY, List.of()).getNamedWriteables()); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + RescorerBuilder.class, + LearningToRankRescorerBuilder.NAME.getPreferredName(), + in -> new LearningToRankRescorerBuilder(in, learningToRankService) + ) + ); return new NamedWriteableRegistry(namedWriteables); } - @Override - protected NamedWriteableRegistry getNamedWriteableRegistry() { - return writableRegistry(); - } - private static Map randomParams() { return randomMap(1, randomIntBetween(1, 10), () -> new Tuple<>(randomIdentifier(), randomIdentifier())); } - - private static LocalModel localModelMock() { - LocalModel model = mock(LocalModel.class); - String modelId = randomIdentifier(); - when(model.getModelId()).thenReturn(modelId); - return model; - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java index baae42b99640f..db81fc2db3348 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsPersisterTests.java @@ -358,10 +358,12 @@ public void testPersistQuantilesSync_QuantilesDocumentCreated() { } public void testPersistQuantilesSync_QuantilesDocumentUpdated() { - testPersistQuantilesSync( - new SearchHits(new SearchHit[] { 
SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistQuantilesSync(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } @SuppressWarnings("unchecked") @@ -397,10 +399,12 @@ public void testPersistQuantilesAsync_QuantilesDocumentCreated() { } public void testPersistQuantilesAsync_QuantilesDocumentUpdated() { - testPersistQuantilesAsync( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), - ".ml-state-dummy" - ); + var hits = new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f); + try { + testPersistQuantilesAsync(hits, ".ml-state-dummy"); + } finally { + hits.decRef(); + } } @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 8179a97955a57..3dcbbeb3fcce5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -928,7 +928,8 @@ private static SearchResponse createSearchResponse(List> sou list.add(hit); } SearchHits hits = new SearchHits(list.toArray(SearchHits.EMPTY), new TotalHits(source.size(), TotalHits.Relation.EQUAL_TO), 1); - when(response.getHits()).thenReturn(hits); + when(response.getHits()).thenReturn(hits.asUnpooled()); + hits.decRef(); return response; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index 
59a79def9bd10..33e5582ec992a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -137,7 +137,8 @@ public MockClientBuilder prepareSearch(String indexName, List do SearchResponse response = mock(SearchResponse.class); SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0.0f); - when(response.getHits()).thenReturn(searchHits); + when(response.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); doAnswer(new Answer() { @Override @@ -176,7 +177,8 @@ public MockClientBuilder prepareSearchFields(String indexName, List() { @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java index 90280bc08de17..47f7d8c65a27a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/StateStreamerTests.java @@ -107,7 +107,8 @@ private static SearchResponse createSearchResponse(List> sou hits[i++] = hit; } SearchHits searchHits = new SearchHits(hits, null, (float) 0.0); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java index 3048a1144ac55..6ec43ca2a3201 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/retention/AbstractExpiredJobDataRemoverTests.java @@ -97,7 +97,8 @@ static SearchResponse createSearchResponseFromHits(List hits) { 1.0f ); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } @@ -111,7 +112,8 @@ private static SearchResponse createSearchResponse(List to } SearchHits hits = new SearchHits(hitsArray, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1.0f); SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.getHits()).thenReturn(hits); + when(searchResponse.getHits()).thenReturn(hits.asUnpooled()); + hits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java index 520efd5e77244..a7ba148584637 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/process/IndexingStateProcessorTests.java @@ -124,7 +124,7 @@ public void testStateRead_StateDocumentCreated() throws IOException { public void testStateRead_StateDocumentUpdated() throws IOException { testStateRead( - new SearchHits(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.createFromMap(Map.of("_index", ".ml-state-dummy")) }, null, 0.0f), ".ml-state-dummy" ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java index f2affbe6d2869..59a3b86ef0bd5 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/test/SearchHitBuilder.java @@ -22,7 +22,7 @@ public class SearchHitBuilder { private final SearchHit hit; public SearchHitBuilder(int docId) { - hit = new SearchHit(docId, null); + hit = SearchHit.unpooled(docId, null); } public SearchHitBuilder addField(String name, Object value) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java index 939ccde7df6c4..015614e56c02b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java @@ -27,7 +27,7 @@ public class TransportVersionUtilsTests extends ESTestCase { "Bertram", new CompatibilityVersions(TransportVersions.V_7_0_1, Map.of()), "Charles", - new CompatibilityVersions(TransportVersions.V_8_500_020, Map.of()), + new CompatibilityVersions(TransportVersions.V_8_9_X, Map.of()), "Dominic", new CompatibilityVersions(TransportVersions.V_8_0_0, Map.of()) ); @@ -79,6 +79,6 @@ public void testIsMinTransformVersionSameAsCurrent() { public void testIsMinTransportVersionOnOrAfter() { assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_7_0_0), equalTo(true)); - assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_8_500_020), equalTo(false)); + assertThat(TransportVersionUtils.isMinTransportVersionOnOrAfter(state, TransportVersions.V_8_9_X), equalTo(false)); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java index 
4f1308e9295c2..4fded8ef8d05d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIteratorTests.java @@ -172,7 +172,8 @@ protected SearchResponse createSearchResponseWithHits(String... hits) { SearchHits searchHits = createHits(hits); SearchResponse searchResponse = mock(SearchResponse.class); when(searchResponse.getScrollId()).thenReturn(SCROLL_ID); - when(searchResponse.getHits()).thenReturn(searchHits); + when(searchResponse.getHits()).thenReturn(searchHits.asUnpooled()); + searchHits.decRef(); return searchResponse; } diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java index 8dbab6e8c06a5..f3417dbf5d472 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStatusActionIT.java @@ -52,10 +52,11 @@ public void testNoTimeoutIfNotWaiting() throws Exception { assertFalse(response.hasData()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104035") public void testWaitsUntilResourcesAreCreated() throws Exception { updateProfilingTemplatesEnabled(true); GetStatusAction.Request request = new GetStatusAction.Request(); + // higher timeout since we have more shards than usual + request.timeout(TimeValue.timeValueSeconds(120)); request.waitForResourcesCreated(true); GetStatusAction.Response response = client().execute(GetStatusAction.INSTANCE, request).get(); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java 
b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java index 004eae1395dc1..d918a0def7ebb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStatusAction.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.IndicesOptions; @@ -177,7 +178,17 @@ private void execute(ClusterState state, ActionListener { + // no data yet + if (e instanceof SearchPhaseExecutionException) { + log.trace("Has data check has failed.", e); + listener.onResponse( + new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, false) + ); + } else { + listener.onFailure(e); + } + })); } else { listener.onResponse(new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, false, anyPre891Data, false)); } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/tree/NodeSubclassTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/tree/NodeSubclassTests.java index 6393a3d6b9d67..a62acab36bdff 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/tree/NodeSubclassTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.ql.expression.Expression; import 
org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.UnresolvedAttributeTests; @@ -474,6 +475,10 @@ public boolean equals(Object obj) { */ return UnresolvedAttributeTests.randomUnresolvedAttribute(); } + if (EnrichPolicy.class == argClass) { + List enrichFields = randomSubsetOf(List.of("e1", "e2", "e3")); + return new EnrichPolicy(randomFrom("match", "range"), null, List.of(), randomFrom("m1", "m2"), enrichFields); + } if (Pipe.class == argClass) { /* diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java index 68b5b8953ccb7..bf979f9deabf0 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupJobTask.java @@ -11,12 +11,12 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.scheduler.SchedulerEngine; @@ -164,10 +164,10 @@ protected void doSaveState(IndexerState indexerState, Map positi @Override protected void onFinish(ActionListener listener) { final RollupJobConfig jobConfig = job.getConfig(); - final ActionListener refreshResponseActionListener = new ActionListener<>() { + final 
ActionListener refreshResponseActionListener = new ActionListener<>() { @Override - public void onResponse(RefreshResponse refreshResponse) { + public void onResponse(BroadcastResponse refreshResponse) { logger.trace("refreshing rollup index {} successful for job {}", jobConfig.getRollupIndex(), jobConfig.getId()); listener.onResponse(null); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java index 7fcde59f73088..430ba6d6faec5 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupJobTaskTests.java @@ -11,10 +11,10 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.Settings; @@ -590,7 +590,7 @@ public void testTriggerWithoutHeaders() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), Collections.emptyMap()); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), 
any()); @@ -697,7 +697,7 @@ public void testTriggerWithHeaders() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -806,7 +806,7 @@ public void testSaveStateChangesIDScheme() throws Exception { RollupJob job = new RollupJob(ConfigTestHelpers.randomRollupJobConfig(random()), headers); Client client = mock(Client.class); doAnswer(invocationOnMock -> { - RefreshResponse r = new RefreshResponse(2, 2, 0, Collections.emptyList()); + BroadcastResponse r = new BroadcastResponse(2, 2, 0, Collections.emptyList()); ((ActionListener) invocationOnMock.getArguments()[2]).onResponse(r); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java index 7ee81b444af46..a31d016c143ae 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsLicenseIntegTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import 
org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.Metadata; @@ -31,7 +32,6 @@ import org.elasticsearch.xpack.core.searchablesnapshots.MountSearchableSnapshotRequest; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheAction; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheRequest; -import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheResponse; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsAction; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsRequest; import org.elasticsearch.xpack.searchablesnapshots.action.SearchableSnapshotsStatsResponse; @@ -121,11 +121,11 @@ public void testStatsRequiresLicense() throws ExecutionException, InterruptedExc } public void testClearCacheRequiresLicense() throws ExecutionException, InterruptedException { - final ActionFuture future = client().execute( + final ActionFuture future = client().execute( ClearSearchableSnapshotsCacheAction.INSTANCE, new ClearSearchableSnapshotsCacheRequest(indexName) ); - final ClearSearchableSnapshotsCacheResponse response = future.get(); + final BroadcastResponse response = future.get(); assertThat(response.getTotalShards(), greaterThan(0)); assertThat(response.getSuccessfulShards(), equalTo(0)); for (DefaultShardOperationFailedException shardFailure : response.getShardFailures()) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java 
b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java index 37b3ecfd36959..c1c40acbd43c5 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java @@ -8,10 +8,9 @@ package org.elasticsearch.xpack.searchablesnapshots.cache.blob; import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; @@ -139,7 +138,7 @@ public void testBlobStoreCache() throws Exception { if (randomBoolean()) { logger.info("--> force-merging index before snapshotting"); - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).get(); + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge(indexName).setMaxNumSegments(1).get(); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(numberOfShards.totalNumShards)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); } @@ -355,7 +354,7 @@ private Client systemClient() { private void refreshSystemIndex() { try { - final RefreshResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); + final 
BroadcastResponse refreshResponse = systemClient().admin().indices().prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX).get(); assertThat(refreshResponse.getSuccessfulShards(), greaterThan(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 981ffe2832e66..56074f97650f0 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -10,9 +10,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; @@ -329,7 +329,7 @@ private long numberOfEntriesInCache() { private void refreshSystemIndex(boolean failIfNotExist) { try { - final RefreshResponse refreshResponse = systemClient().admin() + final BroadcastResponse refreshResponse = systemClient().admin() .indices() 
.prepareRefresh(SNAPSHOT_BLOB_CACHE_INDEX) .setIndicesOptions(failIfNotExist ? RefreshRequest.DEFAULT_INDICES_OPTIONS : IndicesOptions.LENIENT_EXPAND_OPEN) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java index 42ac63579b6c6..b260f6cf2a891 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.searchablesnapshots.cache.shared; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.blobcache.shared.SharedBlobCacheService; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.ShardRouting; @@ -22,7 +23,6 @@ import org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheAction; import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheRequest; -import org.elasticsearch.xpack.searchablesnapshots.action.ClearSearchableSnapshotsCacheResponse; import org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction; import org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction.NodeCachesStatsResponse; import org.elasticsearch.xpack.searchablesnapshots.action.cache.TransportSearchableSnapshotsNodeCachesStatsAction.NodesCachesStatsResponse; @@ -117,7 +117,7 @@ public void testNodesCachesStats() 
throws Exception { assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); - final ClearSearchableSnapshotsCacheResponse clearCacheResponse = client().execute( + final BroadcastResponse clearCacheResponse = client().execute( ClearSearchableSnapshotsCacheAction.INSTANCE, new ClearSearchableSnapshotsCacheRequest(mountedIndex) ).actionGet(); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java index 9628bc75cd337..f57761b806599 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheAction.java @@ -7,13 +7,14 @@ package org.elasticsearch.xpack.searchablesnapshots.action; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; -public class ClearSearchableSnapshotsCacheAction extends ActionType { +public class ClearSearchableSnapshotsCacheAction extends ActionType { public static final ClearSearchableSnapshotsCacheAction INSTANCE = new ClearSearchableSnapshotsCacheAction(); static final String NAME = "cluster:admin/xpack/searchable_snapshots/cache/clear"; private ClearSearchableSnapshotsCacheAction() { - super(NAME, ClearSearchableSnapshotsCacheResponse::new); + super(NAME, BroadcastResponse::new); } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java deleted file mode 100644 
index 23a566f23d71b..0000000000000 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/ClearSearchableSnapshotsCacheResponse.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.searchablesnapshots.action; - -import org.elasticsearch.action.support.DefaultShardOperationFailedException; -import org.elasticsearch.action.support.broadcast.BroadcastResponse; -import org.elasticsearch.common.io.stream.StreamInput; - -import java.io.IOException; -import java.util.List; - -public class ClearSearchableSnapshotsCacheResponse extends BroadcastResponse { - - ClearSearchableSnapshotsCacheResponse(StreamInput in) throws IOException { - super(in); - } - - ClearSearchableSnapshotsCacheResponse( - int totalShards, - int successfulShards, - int failedShards, - List shardFailures - ) { - super(totalShards, successfulShards, failedShards, shardFailures); - } -} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportClearSearchableSnapshotsCacheAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportClearSearchableSnapshotsCacheAction.java index 8a4d21b4a98b8..077ee165d58ef 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportClearSearchableSnapshotsCacheAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportClearSearchableSnapshotsCacheAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.searchablesnapshots.action; import org.elasticsearch.action.support.ActionFilters; +import 
org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction.EmptyResult; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -24,7 +25,7 @@ public class TransportClearSearchableSnapshotsCacheAction extends AbstractTransportSearchableSnapshotsAction< ClearSearchableSnapshotsCacheRequest, - ClearSearchableSnapshotsCacheResponse, + BroadcastResponse, EmptyResult> { @Inject @@ -56,11 +57,11 @@ protected EmptyResult readShardResult(StreamInput in) { } @Override - protected ResponseFactory getResponseFactory( + protected ResponseFactory getResponseFactory( ClearSearchableSnapshotsCacheRequest request, ClusterState clusterState ) { - return (totalShards, successfulShards, failedShards, emptyResults, shardFailures) -> new ClearSearchableSnapshotsCacheResponse( + return (totalShards, successfulShards, failedShards, emptyResults, shardFailures) -> new BroadcastResponse( totalShards, successfulShards, failedShards, diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index cab0c2bff28f0..453f489240f77 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -282,6 +282,7 @@ public class Constants { "cluster:admin/xpack/security/user/change_password", "cluster:admin/xpack/security/user/delete", "cluster:admin/xpack/security/user/get", + "cluster:admin/xpack/security/user/query", "cluster:admin/xpack/security/user/has_privileges", 
"cluster:admin/xpack/security/user/list_privileges", "cluster:admin/xpack/security/user/put", diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java new file mode 100644 index 0000000000000..8e6290163efcd --- /dev/null +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java @@ -0,0 +1,490 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.user.User; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Consumer; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.IsNot.not; + +public class QueryUserIT extends SecurityInBasicRestTestCase { + + private static final String READ_USERS_USER_AUTH_HEADER = "Basic cmVhZF91c2Vyc191c2VyOnJlYWQtdXNlcnMtcGFzc3dvcmQ="; + private static final 
String TEST_USER_NO_READ_USERS_AUTH_HEADER = "Basic c2VjdXJpdHlfdGVzdF91c2VyOnNlY3VyaXR5LXRlc3QtcGFzc3dvcmQ="; + + private static final Set reservedUsers = Set.of( + "elastic", + "kibana", + "kibana_system", + "logstash_system", + "beats_system", + "apm_system", + "remote_monitoring_user" + ); + + private Request queryUserRequestWithAuth() { + final Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/user"); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + return request; + } + + public void testQuery() throws IOException { + // No users to match yet + assertQuery("", users -> assertThat(users, empty())); + + int randomUserCount = createRandomUsers().size(); + + // An empty request body means search for all users (page size = 10) + assertQuery("", users -> assertThat(users.size(), equalTo(Math.min(randomUserCount, 10)))); + + // Match all + assertQuery( + String.format(""" + {"query":{"match_all":{}},"from":0,"size":%s}""", randomUserCount), + users -> assertThat(users.size(), equalTo(randomUserCount)) + ); + + // Exists query + String field = randomFrom("username", "full_name", "roles", "enabled"); + assertQuery( + String.format(""" + {"query":{"exists":{"field":"%s"}},"from":0,"size":%s}""", field, randomUserCount), + users -> assertEquals(users.size(), randomUserCount) + ); + + // Prefix search + User prefixUser1 = createUser( + "mr-prefix1", + new String[] { "master-of-the-universe", "some-other-role" }, + "Prefix1", + "email@something.com", + Map.of(), + true + ); + User prefixUser2 = createUser( + "mr-prefix2", + new String[] { "master-of-the-world", "some-other-role" }, + "Prefix2", + "email@something.com", + Map.of(), + true + ); + assertQuery(""" + {"query":{"bool":{"must":[{"prefix":{"roles":"master-of-the"}}]}}}""", returnedUsers -> { + assertThat(returnedUsers, hasSize(2)); + assertUser(prefixUser1, returnedUsers.get(0)); + assertUser(prefixUser2, 
returnedUsers.get(1)); + }); + + // Wildcard search + assertQuery(""" + { "query": { "wildcard": {"username": "mr-prefix*"} } }""", users -> { + assertThat(users.size(), equalTo(2)); + assertUser(prefixUser1, users.get(0)); + assertUser(prefixUser2, users.get(1)); + users.forEach(k -> assertThat(k, not(hasKey("_sort")))); + }); + + // Terms query + assertQuery(""" + {"query":{"terms":{"roles":["some-other-role"]}}}""", users -> { + assertThat(users.size(), equalTo(2)); + assertUser(prefixUser1, users.get(0)); + assertUser(prefixUser2, users.get(1)); + }); + + // Test other fields + User otherFieldsTestUser = createUser( + "batman-official-user", + new String[] { "bat-cave-admin" }, + "Batman", + "batman@hotmail.com", + Map.of(), + true + ); + String enabledTerm = "\"enabled\":true"; + String fullNameTerm = "\"full_name\":\"batman\""; + String emailTerm = "\"email\":\"batman@hotmail.com\""; + + final String term = randomFrom(enabledTerm, fullNameTerm, emailTerm); + assertQuery( + Strings.format(""" + {"query":{"term":{%s}},"size":100}""", term), + users -> assertThat( + users.stream().map(u -> u.get(User.Fields.USERNAME.getPreferredName()).toString()).toList(), + hasItem("batman-official-user") + ) + ); + + // Test complex query + assertQuery(""" + { "query": {"bool": {"must": [ + {"wildcard": {"username": "batman-official*"}}, + {"term": {"enabled": true}}],"filter": [{"prefix": {"roles": "bat-cave"}}]}}}""", users -> { + assertThat(users.size(), equalTo(1)); + assertUser(otherFieldsTestUser, users.get(0)); + }); + + // Search for fields outside the allowlist fails + assertQueryError(400, """ + { "query": { "prefix": {"not_allowed": "ABC"} } }"""); + + // Search for fields that are not allowed in Query DSL but used internally by the service itself + final String fieldName = randomFrom("type", "password"); + assertQueryError(400, Strings.format(""" + { "query": { "term": {"%s": "%s"} } }""", fieldName, randomAlphaOfLengthBetween(3, 8))); + + // User without 
read_security gets 403 trying to search Users + assertQueryError(TEST_USER_NO_READ_USERS_AUTH_HEADER, 403, """ + { "query": { "wildcard": {"name": "*prefix*"} } }"""); + + // Range query not supported + assertQueryError(400, """ + {"query":{"range":{"username":{"lt":"now"}}}}"""); + + // IDs query not supported + assertQueryError(400, """ + { "query": { "ids": { "values": "abc" } } }"""); + + // Make sure we can't query reserved users + String reservedUsername = getReservedUsernameAndAssertExists(); + assertQuery(String.format(""" + {"query":{"term":{"username":"%s"}}}""", reservedUsername), users -> assertTrue(users.isEmpty())); + } + + public void testPagination() throws IOException { + final List users = createRandomUsers(); + + final int from = randomIntBetween(0, 3); + final int size = randomIntBetween(2, 5); + final int remaining = users.size() - from; + + // Using string only sorting to simplify test + final String sortField = "username"; + final List> allUserInfos = new ArrayList<>(remaining); + { + Request request = queryUserRequestWithAuth(); + request.setJsonEntity("{\"from\":" + from + ",\"size\":" + size + ",\"sort\":[\"" + sortField + "\"]}"); + allUserInfos.addAll(collectUsers(request, users.size())); + } + // first batch should be a full page + assertThat(allUserInfos.size(), equalTo(size)); + + while (allUserInfos.size() < remaining) { + final Request request = queryUserRequestWithAuth(); + final List sortValues = extractSortValues(allUserInfos.get(allUserInfos.size() - 1)); + + request.setJsonEntity(Strings.format(""" + {"size":%s,"sort":["%s"],"search_after":["%s"]} + """, size, sortField, sortValues.get(0))); + final List> userInfoPage = collectUsers(request, users.size()); + + if (userInfoPage.isEmpty() && allUserInfos.size() < remaining) { + fail("fail to retrieve all Users, expect [" + remaining + "], got [" + allUserInfos + "]"); + } + allUserInfos.addAll(userInfoPage); + + // Before all users are retrieved, each page should be a full page + 
if (allUserInfos.size() < remaining) { + assertThat(userInfoPage.size(), equalTo(size)); + } + } + + // Assert sort values match the field of User information + assertThat( + allUserInfos.stream().map(m -> m.get(sortField)).toList(), + equalTo(allUserInfos.stream().map(m -> extractSortValues(m).get(0)).toList()) + ); + + // Assert that all users match the created users and that they're sorted correctly + assertUsers(users, allUserInfos, sortField, from); + + // size can be zero, but total should still reflect the number of users matched + final Request request = queryUserRequestWithAuth(); + request.setJsonEntity("{\"size\":0}"); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + assertThat(responseMap.get("total"), equalTo(users.size())); + assertThat(responseMap.get("count"), equalTo(0)); + } + + @SuppressWarnings("unchecked") + public void testSort() throws IOException { + final List testUsers = List.of( + createUser("a", new String[] { "4", "5", "6" }), + createUser("b", new String[] { "5", "6" }), + createUser("c", new String[] { "7", "8" }) + ); + assertQuery(""" + {"sort":[{"username":{"order":"desc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 2, j = 0; i >= 0; i--, j++) { + assertUser(testUsers.get(j), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + assertQuery(""" + {"sort":[{"username":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + assertQuery(""" + {"sort":[{"roles":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + // Only first element of array is 
used for sorting + assertThat(((List) users.get(i).get("roles")).get(0), equalTo(((List) users.get(i).get("_sort")).get(0))); + } + }); + + // Make sure sorting on _doc works + assertQuery(""" + {"sort":["_doc"]}""", users -> assertThat(users.size(), equalTo(3))); + + // Make sure multi-field sorting works + assertQuery(""" + {"sort":[{"username":{"order":"asc"}}, {"roles":{"order":"asc"}}]}""", users -> { + assertThat(users.size(), equalTo(3)); + for (int i = 0; i <= 2; i++) { + assertUser(testUsers.get(i), users.get(i)); + assertThat(users.get(i).get("username"), equalTo(((List) users.get(i).get("_sort")).get(0))); + assertThat(((List) users.get(i).get("roles")).get(0), equalTo(((List) users.get(i).get("_sort")).get(1))); + } + }); + + final String invalidFieldName = randomFrom("doc_type", "invalid", "password"); + assertQueryError(400, "{\"sort\":[\"" + invalidFieldName + "\"]}"); + + final String invalidSortName = randomFrom("email", "full_name"); + assertQueryError( + READ_USERS_USER_AUTH_HEADER, + 400, + String.format("{\"sort\":[\"%s\"]}", invalidSortName), + String.format("sorting is not supported for field [%s] in User query", invalidSortName) + ); + } + + private String getReservedUsernameAndAssertExists() throws IOException { + String username = randomFrom(reservedUsers); + final Request request = new Request("GET", "/_security/user"); + + if (randomBoolean()) { + // Update the user to create it in the security index + Request putUserRequest = new Request("PUT", "/_security/user/" + username); + putUserRequest.setJsonEntity("{\"enabled\": true}"); + } + + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + assertNotNull(responseMap.get(username)); + return username; + } + + @SuppressWarnings("unchecked") + private List extractSortValues(Map userInfo) { 
+ return (List) userInfo.get("_sort"); + } + + private List> collectUsers(Request request, int total) throws IOException { + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> userInfos = (List>) responseMap.get("users"); + assertThat(responseMap.get("total"), equalTo(total)); + assertThat(responseMap.get("count"), equalTo(userInfos.size())); + return userInfos; + } + + private void assertQueryError(int statusCode, String body) { + assertQueryError(READ_USERS_USER_AUTH_HEADER, statusCode, body); + } + + private void assertQueryError(String authHeader, int statusCode, String body) { + assertQueryError(authHeader, statusCode, body, null); + } + + private void assertQueryError(String authHeader, int statusCode, String body, String errorMessage) { + final Request request = new Request(randomFrom("GET", "POST"), "/_security/_query/user"); + request.setJsonEntity(body); + request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, authHeader)); + final ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(statusCode)); + if (errorMessage != null) { + assertTrue(responseException.getMessage().contains(errorMessage)); + } + } + + private void assertQuery(String body, Consumer>> userVerifier) throws IOException { + final Request request = queryUserRequestWithAuth(); + request.setJsonEntity(body); + final Response response = client().performRequest(request); + assertOK(response); + final Map responseMap = responseAsMap(response); + @SuppressWarnings("unchecked") + final List> users = (List>) responseMap.get("users"); + userVerifier.accept(users); + } + + private void assertUser(User expectedUser, Map actualUser) { + assertUser(userToMap(expectedUser), actualUser); + } + + 
@SuppressWarnings("unchecked") + private void assertUser(Map expectedUser, Map actualUser) { + assertEquals(expectedUser.get(User.Fields.USERNAME.getPreferredName()), actualUser.get(User.Fields.USERNAME.getPreferredName())); + assertArrayEquals( + ((List) expectedUser.get(User.Fields.ROLES.getPreferredName())).toArray(), + ((List) actualUser.get(User.Fields.ROLES.getPreferredName())).toArray() + ); + assertEquals(expectedUser.get(User.Fields.FULL_NAME.getPreferredName()), actualUser.get(User.Fields.FULL_NAME.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.EMAIL.getPreferredName()), actualUser.get(User.Fields.EMAIL.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.METADATA.getPreferredName()), actualUser.get(User.Fields.METADATA.getPreferredName())); + assertEquals(expectedUser.get(User.Fields.ENABLED.getPreferredName()), actualUser.get(User.Fields.ENABLED.getPreferredName())); + } + + private Map userToMap(User user) { + return Map.of( + User.Fields.USERNAME.getPreferredName(), + user.principal(), + User.Fields.ROLES.getPreferredName(), + Arrays.stream(user.roles()).toList(), + User.Fields.FULL_NAME.getPreferredName(), + user.fullName(), + User.Fields.EMAIL.getPreferredName(), + user.email(), + User.Fields.METADATA.getPreferredName(), + user.metadata(), + User.Fields.ENABLED.getPreferredName(), + user.enabled() + ); + } + + private void assertUsers(List expectedUsers, List> actualUsers, String sortField, int from) { + assertEquals(expectedUsers.size() - from, actualUsers.size()); + + List> sortedExpectedUsers = expectedUsers.stream() + .map(this::userToMap) + .sorted(Comparator.comparing(user -> user.get(sortField).toString())) + .toList(); + + for (int i = from; i < sortedExpectedUsers.size(); i++) { + assertUser(sortedExpectedUsers.get(i), actualUsers.get(i - from)); + } + } + + public static Map randomUserMetadata() { + return ESTestCase.randomFrom( + Map.of( + "employee_id", + ESTestCase.randomAlphaOfLength(5), + "number", + 1, 
+ "numbers", + List.of(1, 3, 5), + "extra", + Map.of("favorite pizza", "margherita", "age", 42) + ), + Map.of(ESTestCase.randomAlphaOfLengthBetween(3, 8), ESTestCase.randomAlphaOfLengthBetween(3, 8)), + Map.of(), + null + ); + } + + private List createRandomUsers() throws IOException { + int randomUserCount = randomIntBetween(8, 15); + final List users = new ArrayList<>(randomUserCount); + + for (int i = 0; i < randomUserCount; i++) { + users.add( + createUser( + randomValueOtherThanMany(reservedUsers::contains, () -> randomAlphaOfLengthBetween(3, 8)) + "-" + i, + randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)), + randomAlphaOfLengthBetween(3, 8), + randomAlphaOfLengthBetween(3, 8), + randomUserMetadata(), + randomBoolean() + ) + ); + } + + return users; + } + + private User createUser(String userName, String[] roles) throws IOException { + return createUser( + userName, + roles, + randomAlphaOfLengthBetween(3, 8), + randomAlphaOfLengthBetween(3, 8), + randomUserMetadata(), + randomBoolean() + ); + } + + private User createUser(String userName, String[] roles, String fullName, String email, Map metadata, boolean enabled) + throws IOException { + + final Request request = new Request("POST", "/_security/user/" + userName); + BytesReference source = BytesReference.bytes( + jsonBuilder().map( + Map.of( + User.Fields.USERNAME.getPreferredName(), + userName, + User.Fields.ROLES.getPreferredName(), + roles, + User.Fields.FULL_NAME.getPreferredName(), + fullName, + User.Fields.EMAIL.getPreferredName(), + email, + User.Fields.METADATA.getPreferredName(), + metadata == null ? 
Map.of() : metadata, + User.Fields.PASSWORD.getPreferredName(), + "100%-security-guaranteed", + User.Fields.ENABLED.getPreferredName(), + enabled + ) + ) + ); + request.setJsonEntity(source.utf8ToString()); + Response response = adminClient().performRequest(request); + assertOK(response); + assertTrue((boolean) responseAsMap(response).get("created")); + return new User(userName, roles, fullName, email, metadata, enabled); + } +} diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java index 5843350e36457..587cc4643514c 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java @@ -22,6 +22,9 @@ public abstract class SecurityInBasicRestTestCase extends ESRestTestCase { protected static final String REST_USER = "security_test_user"; private static final SecureString REST_PASSWORD = new SecureString("security-test-password".toCharArray()); + protected static final String READ_USERS_USER = "read_users_user"; + private static final SecureString READ_USERS_PASSWORD = new SecureString("read-users-password".toCharArray()); + private static final String ADMIN_USER = "admin_user"; private static final SecureString ADMIN_PASSWORD = new SecureString("admin-password".toCharArray()); @@ -47,6 +50,7 @@ public abstract class SecurityInBasicRestTestCase extends ESRestTestCase { .user(REST_USER, REST_PASSWORD.toString(), "security_test_role", false) .user(API_KEY_USER, API_KEY_USER_PASSWORD.toString(), "api_key_user_role", false) .user(API_KEY_ADMIN_USER, API_KEY_ADMIN_USER_PASSWORD.toString(), "api_key_admin_role", false) + .user(READ_USERS_USER, 
READ_USERS_PASSWORD.toString(), "read_users_user_role", false) .build(); @Override diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml index 47f1c05ffaaf8..15c291274bcdb 100644 --- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/resources/roles.yml @@ -18,6 +18,11 @@ api_key_user_role: cluster: - manage_own_api_key +# Used to perform query user operations +read_users_user_role: + cluster: + - read_security + # Role with remote indices privileges role_remote_indices: remote_indices: diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index 1e1d8a7f0654c..3fbcd00690e82 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -9,9 +9,8 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.SecureString; @@ -377,14 +376,14 @@ private void prepareIndices() { assertCacheState(DLS_TEMPLATE_ROLE_QUERY_INDEX, 0, 0); // Force merge the index to ensure there can be no background merges during the subsequent searches that 
would invalidate the cache - final ForceMergeResponse forceMergeResponse = indicesAdmin().prepareForceMerge( + final BroadcastResponse forceMergeResponse = indicesAdmin().prepareForceMerge( DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX ).setFlush(true).get(); ElasticsearchAssertions.assertAllSuccessful(forceMergeResponse); - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX) + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX) .get(); assertThat(refreshResponse.getFailedShards(), equalTo(0)); ensureGreen(DLS_INDEX, FLS_INDEX, INDEX, DLS_TEMPLATE_ROLE_QUERY_INDEX); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index e481cf70b9afe..79cf0cb9f7987 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -81,7 +81,6 @@ public void testFetchAllByEntityWithBrokenScroll() { request.scroll(TimeValue.timeValueHours(10L)); String scrollId = randomAlphaOfLength(5); - SearchHit[] hits = new SearchHit[] { new SearchHit(1), new SearchHit(2) }; Answer returnResponse = invocation -> { @SuppressWarnings("unchecked") @@ -89,7 +88,11 @@ public void testFetchAllByEntityWithBrokenScroll() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits(hits, new TotalHits(3, TotalHits.Relation.EQUAL_TO), 1), + SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(1), SearchHit.unpooled(2) }, + new TotalHits(3, TotalHits.Relation.EQUAL_TO), + 1 + ), null, null, false, diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 1329158f57d4d..a693c192f5fd2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -15,10 +15,10 @@ import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -810,7 +810,7 @@ private void doTestDeletionBehaviorWhenKeysBecomeInvalidBeforeAndAfterRetentionP private void refreshSecurityIndex() throws Exception { assertBusy(() -> { - final RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(SECURITY_MAIN_ALIAS).get(); + final BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(SECURITY_MAIN_ALIAS).get(); assertThat(refreshResponse.getFailedShards(), is(0)); }); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index c6b441d9cc04f..b6c6ea60d869f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -260,6 +260,7 @@ import org.elasticsearch.xpack.security.action.user.TransportGetUsersAction; import org.elasticsearch.xpack.security.action.user.TransportHasPrivilegesAction; import org.elasticsearch.xpack.security.action.user.TransportPutUserAction; +import org.elasticsearch.xpack.security.action.user.TransportQueryUserAction; import org.elasticsearch.xpack.security.action.user.TransportSetEnabledAction; import org.elasticsearch.xpack.security.audit.AuditTrail; import org.elasticsearch.xpack.security.audit.AuditTrailService; @@ -365,6 +366,7 @@ import org.elasticsearch.xpack.security.rest.action.user.RestHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestProfileHasPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction; +import org.elasticsearch.xpack.security.rest.action.user.RestQueryUserAction; import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction; import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry; import org.elasticsearch.xpack.security.support.ExtensionComponents; @@ -1315,6 +1317,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(ClearPrivilegesCacheAction.INSTANCE, TransportClearPrivilegesCacheAction.class), new ActionHandler<>(ClearSecurityCacheAction.INSTANCE, TransportClearSecurityCacheAction.class), new ActionHandler<>(GetUsersAction.INSTANCE, TransportGetUsersAction.class), + new ActionHandler<>(ActionTypes.QUERY_USER_ACTION, TransportQueryUserAction.class), new ActionHandler<>(PutUserAction.INSTANCE, TransportPutUserAction.class), new ActionHandler<>(DeleteUserAction.INSTANCE, TransportDeleteUserAction.class), new ActionHandler<>(GetRolesAction.INSTANCE, TransportGetRolesAction.class), @@ -1406,6 +1409,7 @@ public List getRestHandlers( new RestClearApiKeyCacheAction(settings, getLicenseState()), new 
RestClearServiceAccountTokenStoreCacheAction(settings, getLicenseState()), new RestGetUsersAction(settings, getLicenseState()), + new RestQueryUserAction(settings, getLicenseState()), new RestPutUserAction(settings, getLicenseState()), new RestDeleteUserAction(settings, getLicenseState()), new RestGetRolesAction(settings, getLicenseState()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java new file mode 100644 index 0000000000000..2a9aef73ff62a --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java @@ -0,0 +1,107 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; +import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; +import org.elasticsearch.xpack.security.support.UserBoolQueryBuilder; + +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; +import static org.elasticsearch.xpack.security.support.UserBoolQueryBuilder.USER_FIELD_NAME_TRANSLATOR; + +public final class TransportQueryUserAction extends TransportAction { + private final NativeUsersStore usersStore; + private static final Set FIELD_NAMES_WITH_SORT_SUPPORT = Set.of("username", "roles", "enabled"); + + @Inject + public TransportQueryUserAction(TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { + super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager()); + this.usersStore = usersStore; + } + + @Override + protected void doExecute(Task task, QueryUserRequest request, ActionListener listener) { + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource() + .version(false) + .fetchSource(true) + .trackTotalHits(true); + + if (request.getFrom() != null) { + 
searchSourceBuilder.from(request.getFrom()); + } + if (request.getSize() != null) { + searchSourceBuilder.size(request.getSize()); + } + + searchSourceBuilder.query(UserBoolQueryBuilder.build(request.getQueryBuilder())); + + if (request.getFieldSortBuilders() != null) { + translateFieldSortBuilders(request.getFieldSortBuilders(), searchSourceBuilder); + } + + if (request.getSearchAfterBuilder() != null) { + searchSourceBuilder.searchAfter(request.getSearchAfterBuilder().getSortValues()); + } + + final SearchRequest searchRequest = new SearchRequest(new String[] { SECURITY_MAIN_ALIAS }, searchSourceBuilder); + usersStore.queryUsers(searchRequest, listener); + } + + // package private for testing + static void translateFieldSortBuilders(List fieldSortBuilders, SearchSourceBuilder searchSourceBuilder) { + fieldSortBuilders.forEach(fieldSortBuilder -> { + if (fieldSortBuilder.getNestedSort() != null) { + throw new IllegalArgumentException("nested sorting is not supported for User query"); + } + if (FieldSortBuilder.DOC_FIELD_NAME.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(fieldSortBuilder.getFieldName()); + if (FIELD_NAMES_WITH_SORT_SUPPORT.contains(translatedFieldName) == false) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "sorting is not supported for field [%s] in User query", fieldSortBuilder.getFieldName()) + ); + } + + if (translatedFieldName.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final FieldSortBuilder translatedFieldSortBuilder = new FieldSortBuilder(translatedFieldName).order( + fieldSortBuilder.order() + ) + .missing(fieldSortBuilder.missing()) + .unmappedType(fieldSortBuilder.unmappedType()) + .setFormat(fieldSortBuilder.getFormat()); + + if (fieldSortBuilder.sortMode() != null) { + 
translatedFieldSortBuilder.sortMode(fieldSortBuilder.sortMode()); + } + if (fieldSortBuilder.getNumericType() != null) { + translatedFieldSortBuilder.setNumericType(fieldSortBuilder.getNumericType()); + } + searchSourceBuilder.sort(translatedFieldSortBuilder); + } + } + }); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 5a8b228a1145c..26f6268aaa5dc 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -210,7 +210,7 @@ public class TokenService { static final TransportVersion VERSION_ACCESS_TOKENS_AS_UUIDS = TransportVersions.V_7_2_0; static final TransportVersion VERSION_MULTIPLE_CONCURRENT_REFRESHES = TransportVersions.V_7_2_0; static final TransportVersion VERSION_CLIENT_AUTH_FOR_REFRESH = TransportVersions.V_8_2_0; - static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_500_061; + static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_10_X; private static final Logger logger = LogManager.getLogger(TokenService.class); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 36f78682b6bd1..81aa487f73e2c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; 
+import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -43,6 +44,7 @@ import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -57,6 +59,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.function.Consumer; import java.util.function.Supplier; @@ -161,6 +164,40 @@ public void getUsers(String[] userNames, final ActionListener> } } + public void queryUsers(SearchRequest searchRequest, ActionListener listener) { + final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); + if (frozenSecurityIndex.indexExists() == false) { + logger.debug("security index does not exist"); + listener.onResponse(QueryUserResponse.emptyResponse()); + } else if (frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { + listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); + } else { + securityIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client, + SECURITY_ORIGIN, + TransportSearchAction.TYPE, + searchRequest, + ActionListener.wrap(searchResponse -> { + final long total = searchResponse.getHits().getTotalHits().value; + if (total == 0) { + logger.debug("No users found for query [{}]", searchRequest.source().query()); + 
listener.onResponse(QueryUserResponse.emptyResponse()); + return; + } + + final List userItem = Arrays.stream(searchResponse.getHits().getHits()).map(hit -> { + UserAndPassword userAndPassword = transformUser(hit.getId(), hit.getSourceAsMap()); + return userAndPassword != null ? new QueryUserResponse.Item(userAndPassword.user(), hit.getSortValues()) : null; + }).filter(Objects::nonNull).toList(); + listener.onResponse(new QueryUserResponse(total, userItem)); + }, listener::onFailure) + ) + ); + } + } + void getUserCount(final ActionListener listener) { final SecurityIndexManager frozenSecurityIndex = this.securityIndex.defensiveCopy(); if (frozenSecurityIndex.indexExists() == false) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java new file mode 100644 index 0000000000000..407fe36fa82d3 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rest.action.user; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.index.query.AbstractQueryBuilder.parseTopLevelQuery; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Rest action to search for Users + */ +public final class RestQueryUserAction extends SecurityBaseRestHandler { + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "query_user_request_payload", + a -> new Payload((QueryBuilder) a[0], (Integer) a[1], (Integer) a[2], (List) a[3], (SearchAfterBuilder) a[4]) + ); + + static { + PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseTopLevelQuery(p), new ParseField("query")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("from")); + 
PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return new FieldSortBuilder(p.text()); + } else if (p.currentToken() == XContentParser.Token.START_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, p.nextToken(), p); + final FieldSortBuilder fieldSortBuilder = FieldSortBuilder.fromXContent(p, p.currentName()); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, p.nextToken(), p); + return fieldSortBuilder; + } else { + throw new IllegalArgumentException("mal-formatted sort object"); + } + }, new ParseField("sort")); + PARSER.declareField( + optionalConstructorArg(), + (p, c) -> SearchAfterBuilder.fromXContent(p), + new ParseField("search_after"), + ObjectParser.ValueType.VALUE_ARRAY + ); + } + + /** + * @param settings the node's settings + * @param licenseState the license state that will be used to determine if + * security is licensed + */ + public RestQueryUserAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_security/_query/user"), new Route(POST, "/_security/_query/user")); + } + + @Override + public String getName() { + return "xpack_security_query_user"; + } + + @Override + protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final QueryUserRequest queryUserRequest; + if (request.hasContentOrSourceParam()) { + final Payload payload = PARSER.parse(request.contentOrSourceParamParser(), null); + queryUserRequest = new QueryUserRequest( + payload.queryBuilder, + payload.from, + payload.size, + payload.fieldSortBuilders, + payload.searchAfterBuilder + ); + } else { + queryUserRequest = new QueryUserRequest(null, null, null, null, null); + } + return channel -> 
client.execute(ActionTypes.QUERY_USER_ACTION, queryUserRequest, new RestToXContentListener<>(channel)); + } + + private record Payload( + @Nullable QueryBuilder queryBuilder, + @Nullable Integer from, + @Nullable Integer size, + @Nullable List fieldSortBuilders, + @Nullable SearchAfterBuilder searchAfterBuilder + ) {} +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java new file mode 100644 index 0000000000000..291d55b7b0837 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityIndexFieldNameTranslator.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import java.util.List; +import java.util.function.Function; +import java.util.function.Predicate; + +public class SecurityIndexFieldNameTranslator { + + private final List fieldNameTranslators; + + public SecurityIndexFieldNameTranslator(List fieldNameTranslators) { + this.fieldNameTranslators = fieldNameTranslators; + } + + public String translate(String queryFieldName) { + for (FieldName fieldName : this.fieldNameTranslators) { + if (fieldName.supportsQueryName(queryFieldName)) { + return fieldName.indexFieldName(queryFieldName); + } + } + throw new IllegalArgumentException("Field [" + queryFieldName + "] is not allowed"); + } + + public boolean supportedIndexFieldName(String indexFieldName) { + for (FieldName fieldName : this.fieldNameTranslators) { + if (fieldName.supportsIndexName(indexFieldName)) { + return true; + } + } + return false; + } + + public static FieldName exact(String name) { + return exact(name, Function.identity()); + } + + public static FieldName exact(String name, Function translation) { + return new SecurityIndexFieldNameTranslator.ExactFieldName(name, translation); + } + + public abstract static class FieldName { + private final Function toIndexFieldName; + protected final Predicate validIndexNamePredicate; + + FieldName(Function toIndexFieldName, Predicate validIndexNamePredicate) { + this.toIndexFieldName = toIndexFieldName; + this.validIndexNamePredicate = validIndexNamePredicate; + } + + public abstract boolean supportsQueryName(String queryFieldName); + + public abstract boolean supportsIndexName(String indexFieldName); + + public String indexFieldName(String queryFieldName) { + return toIndexFieldName.apply(queryFieldName); + } + } + + private static class ExactFieldName extends FieldName { + private final String name; + + private ExactFieldName(String name, Function toIndexFieldName) { + super(toIndexFieldName, fieldName -> toIndexFieldName.apply(name).equals(fieldName)); + 
this.name = name; + } + + @Override + public boolean supportsQueryName(String queryFieldName) { + return queryFieldName.equals(name); + } + + @Override + public boolean supportsIndexName(String indexFieldName) { + return validIndexNamePredicate.test(indexFieldName); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java new file mode 100644 index 0000000000000..5d3824ab1f8ce --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilder.java @@ -0,0 +1,101 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.support; + +import org.apache.lucene.search.Query; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.PrefixQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.security.support.SecurityIndexFieldNameTranslator.exact; + +public class UserBoolQueryBuilder extends BoolQueryBuilder { + public static final SecurityIndexFieldNameTranslator USER_FIELD_NAME_TRANSLATOR = new 
SecurityIndexFieldNameTranslator( + List.of(exact("username"), exact("roles"), exact("full_name"), exact("email"), exact("enabled")) + ); + + private UserBoolQueryBuilder() {} + + public static UserBoolQueryBuilder build(QueryBuilder queryBuilder) { + UserBoolQueryBuilder userQueryBuilder = new UserBoolQueryBuilder(); + if (queryBuilder != null) { + QueryBuilder translaterdQueryBuilder = translateToUserQueryBuilder(queryBuilder); + userQueryBuilder.must(translaterdQueryBuilder); + } + userQueryBuilder.filter(QueryBuilders.termQuery("type", "user")); + + return userQueryBuilder; + } + + private static QueryBuilder translateToUserQueryBuilder(QueryBuilder qb) { + if (qb instanceof final BoolQueryBuilder query) { + final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() + .minimumShouldMatch(query.minimumShouldMatch()) + .adjustPureNegative(query.adjustPureNegative()); + query.must().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::must); + query.should().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::should); + query.mustNot().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::mustNot); + query.filter().stream().map(UserBoolQueryBuilder::translateToUserQueryBuilder).forEach(newQuery::filter); + return newQuery; + } else if (qb instanceof MatchAllQueryBuilder) { + return qb; + } else if (qb instanceof final TermQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.termQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + } else if (qb instanceof final ExistsQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.existsQuery(translatedFieldName); + } else if (qb instanceof final TermsQueryBuilder query) { + if (query.termsLookup() != null) { + throw new 
IllegalArgumentException("Terms query with terms lookup is not supported for User query"); + } + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.termsQuery(translatedFieldName, query.getValues()); + } else if (qb instanceof final PrefixQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.prefixQuery(translatedFieldName, query.value()).caseInsensitive(query.caseInsensitive()); + } else if (qb instanceof final WildcardQueryBuilder query) { + final String translatedFieldName = USER_FIELD_NAME_TRANSLATOR.translate(query.fieldName()); + return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .rewrite(query.rewrite()); + } else { + throw new IllegalArgumentException("Query type [" + qb.getName() + "] is not supported for User query"); + } + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + context.setAllowedFields(this::isIndexFieldNameAllowed); + return super.doToQuery(context); + } + + @Override + protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException { + if (queryRewriteContext instanceof SearchExecutionContext) { + ((SearchExecutionContext) queryRewriteContext).setAllowedFields(this::isIndexFieldNameAllowed); + } + return super.doRewrite(queryRewriteContext); + } + + boolean isIndexFieldNameAllowed(String queryFieldName) { + // Type is needed to filter on user doc type + return queryFieldName.equals("type") || USER_FIELD_NAME_TRANSLATOR.supportedIndexFieldName(queryFieldName); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 
a088e6c61822a..4127b8cdad32b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; @@ -34,6 +33,7 @@ import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; @@ -197,25 +197,30 @@ protected void SearchRequest searchRequest = (SearchRequest) request; searchRequests.add(searchRequest); final SearchHit[] hits = searchFunction.apply(searchRequest); - ActionListener.respondAndRelease( - listener, - (Response) new SearchResponse( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1, - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null - ) - ); + final var searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f); + try { + ActionListener.respondAndRelease( + listener, + (Response) new SearchResponse( + searchHits, + null, + null, + false, + false, + null, + 1, + "_scrollId1", + 1, + 1, + 0, + 1, + null, + null + ) + ); + } finally { + searchHits.decRef(); + } } else if 
(TransportSearchScrollAction.TYPE.name().equals(action.name())) { assertThat(request, instanceOf(SearchScrollRequest.class)); ActionListener.respondAndRelease( @@ -245,7 +250,7 @@ protected void listener.onResponse((Response) response); } else if (RefreshAction.NAME.equals(action.name())) { assertThat(request, instanceOf(RefreshRequest.class)); - listener.onResponse((Response) mock(RefreshResponse.class)); + listener.onResponse((Response) mock(BroadcastResponse.class)); } else { super.doExecute(action, request, listener); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java new file mode 100644 index 0000000000000..aa5f935998757 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.user; + +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.NestedSortBuilder; +import org.elasticsearch.search.sort.SortMode; +import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Locale; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + +public class TransportQueryUserActionTests extends ESTestCase { + private static final String[] allowedIndexFieldNames = new String[] { "username", "roles", "enabled" }; + + public void testTranslateFieldSortBuilders() { + final List fieldNames = List.of(allowedIndexFieldNames); + + final List originals = fieldNames.stream().map(this::randomFieldSortBuilderWithName).toList(); + + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); + TransportQueryUserAction.translateFieldSortBuilders(originals, searchSourceBuilder); + + IntStream.range(0, originals.size()).forEach(i -> { + final FieldSortBuilder original = originals.get(i); + final FieldSortBuilder translated = (FieldSortBuilder) searchSourceBuilder.sorts().get(i); + assertThat(original.getFieldName(), equalTo(translated.getFieldName())); + + assertThat(translated.order(), equalTo(original.order())); + assertThat(translated.missing(), equalTo(original.missing())); + assertThat(translated.unmappedType(), equalTo(original.unmappedType())); + assertThat(translated.getNumericType(), equalTo(original.getNumericType())); + assertThat(translated.getFormat(), equalTo(original.getFormat())); + assertThat(translated.sortMode(), equalTo(original.sortMode())); + }); + } + + public void testNestedSortingIsNotAllowed() { + final FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("roles"); + fieldSortBuilder.setNestedSort(new NestedSortBuilder("something")); + final 
IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportQueryUserAction.translateFieldSortBuilders(List.of(fieldSortBuilder), SearchSourceBuilder.searchSource()) + ); + assertThat(e.getMessage(), equalTo("nested sorting is not supported for User query")); + } + + public void testNestedSortingOnTextFieldsNotAllowed() { + String fieldName = randomFrom("full_name", "email"); + final List fieldNames = List.of(fieldName); + final List originals = fieldNames.stream().map(this::randomFieldSortBuilderWithName).toList(); + final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); + + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> TransportQueryUserAction.translateFieldSortBuilders(originals, searchSourceBuilder) + ); + assertThat(e.getMessage(), equalTo(String.format(Locale.ROOT, "sorting is not supported for field [%s] in User query", fieldName))); + } + + private FieldSortBuilder randomFieldSortBuilderWithName(String name) { + final FieldSortBuilder fieldSortBuilder = new FieldSortBuilder(name); + fieldSortBuilder.order(randomBoolean() ? SortOrder.ASC : SortOrder.DESC); + fieldSortBuilder.setFormat(randomBoolean() ? 
randomAlphaOfLengthBetween(3, 16) : null); + if (randomBoolean()) { + fieldSortBuilder.setNumericType(randomFrom("long", "double", "date", "date_nanos")); + } + if (randomBoolean()) { + fieldSortBuilder.missing(randomAlphaOfLengthBetween(3, 8)); + } + if (randomBoolean()) { + fieldSortBuilder.sortMode(randomFrom(SortMode.values())); + } + return fieldSortBuilder; + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index b921fef9fd917..ac11dee8d4a48 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -405,7 +405,7 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { when(client.prepareSearch(eq(SECURITY_MAIN_ALIAS))).thenReturn(new SearchRequestBuilder(client)); doAnswer(invocation -> { final var listener = (ActionListener) invocation.getArguments()[1]; - final var searchHit = new SearchHit(docId, apiKeyId); + final var searchHit = SearchHit.unpooled(docId, apiKeyId); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.map(buildApiKeySourceDoc("some_hash".toCharArray())); searchHit.sourceRef(BytesReference.bytes(builder)); @@ -413,7 +413,7 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( + SearchHits.unpooled( new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), randomFloat(), @@ -758,7 +758,7 @@ public void testCrossClusterApiKeyUsageStats() { ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( + SearchHits.unpooled( searchHits.toArray(SearchHit[]::new), new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), 
randomFloat(), @@ -825,7 +825,7 @@ private SearchHit searchHitForCrossClusterApiKey(int crossClusterAccessLevel) { }; final int docId = randomIntBetween(0, Integer.MAX_VALUE); final String apiKeyId = randomAlphaOfLength(20); - final var searchHit = new SearchHit(docId, apiKeyId); + final var searchHit = SearchHit.unpooled(docId, apiKeyId); try (XContentBuilder builder = JsonXContent.contentBuilder()) { builder.map(XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.format(""" { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 2f646631d14cd..3c542a33d25e9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -254,7 +254,7 @@ private static DiscoveryNode addAnotherPre8500DataNode(ClusterService clusterSer transportVersion = TransportVersions.V_8_8_1; } else { version = Version.V_8_9_0; - transportVersion = TransportVersions.V_8_500_020; + transportVersion = TransportVersions.V_8_9_X; } return addAnotherDataNodeWithVersion(clusterService, version, transportVersion); } @@ -1235,9 +1235,9 @@ private void mockTokenForRefreshToken( assertThat(refreshFilter.fieldName(), is("refresh_token.token")); final SearchHits hits; if (storedRefreshToken.equals(refreshFilter.value())) { - SearchHit hit = new SearchHit(randomInt(), "token_" + userToken.getId()); + SearchHit hit = SearchHit.unpooled(randomInt(), "token_" + userToken.getId()); hit.sourceRef(docSource); - hits = new SearchHits(new SearchHit[] { hit }, null, 1); + hits = SearchHits.unpooled(new SearchHit[] { hit }, null, 1); } else { hits = SearchHits.EMPTY_WITH_TOTAL_HITS; } diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java index 3a9fee4288bf2..33d3e6783b9e6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java @@ -263,7 +263,7 @@ public void testFindTokensFor() { if (r instanceof SearchRequest) { final SearchHit[] hits = IntStream.range(0, nhits) .mapToObj( - i -> new SearchHit( + i -> SearchHit.unpooled( randomIntBetween(0, Integer.MAX_VALUE), SERVICE_ACCOUNT_TOKEN_DOC_TYPE + "-" + accountId.asPrincipal() + "/" + tokenNames[i] ) @@ -272,7 +272,7 @@ public void testFindTokensFor() { ActionListener.respondAndRelease( l, new SearchResponse( - new SearchHits(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), + SearchHits.unpooled(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), null, null, false, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 169275ccc3ee3..a0008ba632151 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -346,7 +346,7 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi doAnswer(invocation -> { @SuppressWarnings("unchecked") final var listener = (ActionListener) 
invocation.getArguments()[1]; - final var searchHit = new SearchHit( + final var searchHit = SearchHit.unpooled( randomIntBetween(0, Integer.MAX_VALUE), NativeRoleMappingStore.getIdForName(mapping.getName()) ); @@ -357,14 +357,7 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits( - new SearchHit[] { searchHit }, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - randomFloat(), - null, - null, - null - ), + SearchHits.unpooled(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), randomFloat()), null, null, false, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index 0c2f9cefbcffb..ed1b5e6c7668b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -818,22 +818,12 @@ private SearchHit[] buildHits(List sourcePrivile } private static SearchResponse buildSearchResponse(SearchHit[] hits) { - return new SearchResponse( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1, - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null - ); + var searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f); + try { + return new SearchResponse(searchHits.asUnpooled(), null, null, false, false, null, 1, "_scrollId1", 1, 1, 0, 1, null, null); + } finally { + searchHits.decRef(); + } } private void handleBulkRequest(int expectedCount, Predicate> isCreated) { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java new file mode 100644 index 0000000000000..4a593eeb24ac6 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java @@ -0,0 +1,175 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.rest.action.user; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.PrefixQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.AbstractRestChannel; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.searchafter.SearchAfterBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import 
org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; + +import java.util.List; + +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.mockito.Mockito.mock; + +public class RestQueryUserActionTests extends ESTestCase { + + private final XPackLicenseState mockLicenseState = mock(XPackLicenseState.class); + + @Override + protected NamedXContentRegistry xContentRegistry() { + final SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of()); + return new NamedXContentRegistry(searchModule.getNamedXContents()); + } + + public void testQueryParsing() throws Exception { + final String query1 = """ + { + "query": { + "bool": { + "must": [ + { + "terms": { + "username": [ "bart", "homer" ] + } + } + ], + "should": [ { "prefix": { "username": "ba" } } ] + } + } + }"""; + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(query1), + XContentType.JSON + ).build(); + + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + + try (var threadPool = createThreadPool()) { + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + QueryUserRequest queryUserRequest = (QueryUserRequest) request; + final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder(); + assertNotNull(queryBuilder); + assertThat(queryBuilder.getClass(), is(BoolQueryBuilder.class)); + final BoolQueryBuilder boolQueryBuilder = 
(BoolQueryBuilder) queryBuilder; + assertTrue(boolQueryBuilder.filter().isEmpty()); + assertTrue(boolQueryBuilder.mustNot().isEmpty()); + assertThat(boolQueryBuilder.must(), hasSize(1)); + final QueryBuilder mustQueryBuilder = boolQueryBuilder.must().get(0); + assertThat(mustQueryBuilder.getClass(), is(TermsQueryBuilder.class)); + assertThat(((TermsQueryBuilder) mustQueryBuilder).fieldName(), equalTo("username")); + assertThat(boolQueryBuilder.should(), hasSize(1)); + final QueryBuilder shouldQueryBuilder = boolQueryBuilder.should().get(0); + assertThat(shouldQueryBuilder.getClass(), is(PrefixQueryBuilder.class)); + assertThat(((PrefixQueryBuilder) shouldQueryBuilder).fieldName(), equalTo("username")); + listener.onResponse((Response) new QueryUserResponse(0, List.of())); + } + }; + final RestQueryUserAction restQueryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState); + restQueryUserAction.handleRequest(restRequest, restChannel, client); + } + + assertNotNull(responseSetOnce.get()); + } + + public void testParsingSearchParameters() throws Exception { + final String requestBody = """ + { + "query": { + "match_all": {} + }, + "from": 42, + "size": 20, + "sort": [ "username", "full_name"], + "search_after": [ "bart" ] + }"""; + + final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent( + new BytesArray(requestBody), + XContentType.JSON + ).build(); + + final SetOnce responseSetOnce = new SetOnce<>(); + final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) { + @Override + public void sendResponse(RestResponse restResponse) { + responseSetOnce.set(restResponse); + } + }; + + try (var threadPool = createThreadPool()) { + final var client = new NodeClient(Settings.EMPTY, threadPool) { + @SuppressWarnings("unchecked") + @Override + public void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + QueryUserRequest queryUserRequest = (QueryUserRequest) 
request; + final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder(); + assertNotNull(queryBuilder); + assertThat(queryBuilder.getClass(), is(MatchAllQueryBuilder.class)); + assertThat(queryUserRequest.getFrom(), equalTo(42)); + assertThat(queryUserRequest.getSize(), equalTo(20)); + final List fieldSortBuilders = queryUserRequest.getFieldSortBuilders(); + assertThat(fieldSortBuilders, hasSize(2)); + + assertThat(fieldSortBuilders.get(0), equalTo(new FieldSortBuilder("username"))); + assertThat(fieldSortBuilders.get(1), equalTo(new FieldSortBuilder("full_name"))); + + final SearchAfterBuilder searchAfterBuilder = queryUserRequest.getSearchAfterBuilder(); + assertThat(searchAfterBuilder, equalTo(new SearchAfterBuilder().setSortValues(new String[] { "bart" }))); + + listener.onResponse((Response) new QueryUserResponse(0, List.of())); + } + }; + + final RestQueryUserAction queryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState); + queryUserAction.handleRequest(restRequest, restChannel, client); + } + assertNotNull(responseSetOnce.get()); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java new file mode 100644 index 0000000000000..460980d318786 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/UserBoolQueryBuilderTests.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.support; + +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MultiTermQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.index.query.SpanQueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.indices.TermsLookup; +import org.elasticsearch.script.Script; +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.Is.is; +import static org.hamcrest.core.IsEqual.equalTo; +import static org.hamcrest.core.IsIterableContaining.hasItem; +import static org.hamcrest.core.StringContains.containsString; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +public class UserBoolQueryBuilderTests extends ESTestCase { + private static final String[] allowedIndexFieldNames = new String[] { "username", "roles", "full_name", "email", "enabled" }; + + public void testBuildFromSimpleQuery() { + final QueryBuilder query = randomSimpleQuery(); + final UserBoolQueryBuilder userQueryBuilder = UserBoolQueryBuilder.build(query); + assertCommonFilterQueries(userQueryBuilder); + final List mustQueries = userQueryBuilder.must(); + assertThat(mustQueries, hasSize(1)); + 
assertThat(mustQueries.get(0), equalTo(query)); + assertTrue(userQueryBuilder.should().isEmpty()); + assertTrue(userQueryBuilder.mustNot().isEmpty()); + } + + public void testBuildFromBoolQuery() { + final BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery(); + + if (randomBoolean()) { + boolQueryBuilder.must(QueryBuilders.prefixQuery(randomAllowedField(), "bar")); + } + if (randomBoolean()) { + boolQueryBuilder.should(QueryBuilders.wildcardQuery(randomAllowedField(), "*ar*")); + } + if (randomBoolean()) { + boolQueryBuilder.filter(QueryBuilders.termsQuery("roles", randomArray(3, 8, String[]::new, () -> "role-" + randomInt()))); + } + if (randomBoolean()) { + boolQueryBuilder.minimumShouldMatch(randomIntBetween(1, 2)); + } + final UserBoolQueryBuilder userBoolQueryBuilder = UserBoolQueryBuilder.build(boolQueryBuilder); + assertCommonFilterQueries(userBoolQueryBuilder); + + assertThat(userBoolQueryBuilder.must(), hasSize(1)); + assertThat(userBoolQueryBuilder.should(), empty()); + assertThat(userBoolQueryBuilder.mustNot(), empty()); + assertThat(userBoolQueryBuilder.filter(), hasItem(QueryBuilders.termQuery("type", "user"))); + assertThat(userBoolQueryBuilder.must().get(0).getClass(), is(BoolQueryBuilder.class)); + final BoolQueryBuilder translated = (BoolQueryBuilder) userBoolQueryBuilder.must().get(0); + assertThat(translated.must(), equalTo(boolQueryBuilder.must())); + assertThat(translated.should(), equalTo(boolQueryBuilder.should())); + assertThat(translated.mustNot(), equalTo(boolQueryBuilder.mustNot())); + assertThat(translated.minimumShouldMatch(), equalTo(boolQueryBuilder.minimumShouldMatch())); + assertThat(translated.filter(), equalTo(boolQueryBuilder.filter())); + } + + public void testFieldNameTranslation() { + String field = randomAllowedField(); + final WildcardQueryBuilder wildcardQueryBuilder = QueryBuilders.wildcardQuery(field, "*" + randomAlphaOfLength(3)); + final UserBoolQueryBuilder userBoolQueryBuilder = 
UserBoolQueryBuilder.build(wildcardQueryBuilder); + assertCommonFilterQueries(userBoolQueryBuilder); + assertThat(userBoolQueryBuilder.must().get(0), equalTo(QueryBuilders.wildcardQuery(field, wildcardQueryBuilder.value()))); + } + + public void testAllowListOfFieldNames() { + final String fieldName = randomValueOtherThanMany( + v -> Arrays.asList(allowedIndexFieldNames).contains(v), + () -> randomFrom(randomAlphaOfLengthBetween(3, 20), "type", "password") + ); + + // MatchAllQueryBuilder doesn't do any translation, so skip + final QueryBuilder q1 = randomValueOtherThanMany( + q -> q.getClass() == MatchAllQueryBuilder.class, + () -> randomSimpleQuery(fieldName) + ); + final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> UserBoolQueryBuilder.build(q1)); + + assertThat(exception.getMessage(), containsString("Field [" + fieldName + "] is not allowed")); + } + + public void testTermsLookupIsNotAllowed() { + final TermsQueryBuilder q1 = QueryBuilders.termsLookupQuery("roles", new TermsLookup("lookup", "1", "id")); + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> UserBoolQueryBuilder.build(q1)); + assertThat(e1.getMessage(), containsString("Terms query with terms lookup is not supported for User query")); + } + + public void testDisallowedQueryTypes() { + final AbstractQueryBuilder> q1 = randomFrom( + QueryBuilders.idsQuery(), + QueryBuilders.rangeQuery(randomAlphaOfLength(5)), + QueryBuilders.matchQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.constantScoreQuery(mock(QueryBuilder.class)), + QueryBuilders.boostingQuery(mock(QueryBuilder.class), mock(QueryBuilder.class)), + QueryBuilders.queryStringQuery("q=a:42"), + QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(5)), + QueryBuilders.combinedFieldsQuery(randomAlphaOfLength(5)), + QueryBuilders.disMaxQuery(), + QueryBuilders.distanceFeatureQuery( + randomAlphaOfLength(5), + 
mock(DistanceFeatureQueryBuilder.Origin.class), + randomAlphaOfLength(5) + ), + QueryBuilders.fieldMaskingSpanQuery(mock(SpanQueryBuilder.class), randomAlphaOfLength(5)), + QueryBuilders.functionScoreQuery(mock(QueryBuilder.class)), + QueryBuilders.fuzzyQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.wrapperQuery(randomAlphaOfLength(5)), + QueryBuilders.matchBoolPrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.matchPhraseQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.matchPhrasePrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.moreLikeThisQuery(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(5))), + QueryBuilders.regexpQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.spanTermQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.spanOrQuery(mock(SpanQueryBuilder.class)), + QueryBuilders.spanContainingQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)), + QueryBuilders.spanFirstQuery(mock(SpanQueryBuilder.class), randomIntBetween(1, 3)), + QueryBuilders.spanMultiTermQueryBuilder(mock(MultiTermQueryBuilder.class)), + QueryBuilders.spanNotQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)), + QueryBuilders.scriptQuery(new Script(randomAlphaOfLength(5))), + QueryBuilders.scriptScoreQuery(mock(QueryBuilder.class), new Script(randomAlphaOfLength(5))), + QueryBuilders.geoWithinQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoBoundingBoxQuery(randomAlphaOfLength(5)), + QueryBuilders.geoDisjointQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoDistanceQuery(randomAlphaOfLength(5)), + QueryBuilders.geoIntersectionQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)), + QueryBuilders.geoShapeQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)) + ); + + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> 
UserBoolQueryBuilder.build(q1)); + assertThat(e1.getMessage(), containsString("Query type [" + q1.getName() + "] is not supported for User query")); + } + + public void testWillSetAllowedFields() { + final UserBoolQueryBuilder userBoolQueryBuilder = UserBoolQueryBuilder.build(randomSimpleQuery()); + + final SearchExecutionContext context = mock(SearchExecutionContext.class); + doAnswer(invocationOnMock -> { + final Object[] args = invocationOnMock.getArguments(); + @SuppressWarnings("unchecked") + final Predicate predicate = (Predicate) args[0]; + assertTrue(predicate.getClass().getName().startsWith(UserBoolQueryBuilder.class.getName())); + testAllowedIndexFieldName(predicate); + return null; + }).when(context).setAllowedFields(any()); + try { + if (randomBoolean()) { + userBoolQueryBuilder.doToQuery(context); + } else { + userBoolQueryBuilder.doRewrite(context); + } + } catch (Exception e) { + // just ignore any exception from superclass since we only need verify the allowedFields are set + } finally { + verify(context).setAllowedFields(any()); + } + } + + private void testAllowedIndexFieldName(Predicate predicate) { + final String allowedField = randomAllowedField(); + assertTrue(predicate.test(allowedField)); + + final String disallowedField = randomBoolean() ? 
(randomAlphaOfLengthBetween(1, 3) + allowedField) : (allowedField.substring(1)); + assertFalse(predicate.test(disallowedField)); + } + + private void assertCommonFilterQueries(UserBoolQueryBuilder qb) { + final List tqb = qb.filter() + .stream() + .filter(q -> q.getClass() == TermQueryBuilder.class) + .map(q -> (TermQueryBuilder) q) + .toList(); + assertTrue(tqb.stream().anyMatch(q -> q.equals(QueryBuilders.termQuery("type", "user")))); + } + + private String randomAllowedField() { + return randomFrom(allowedIndexFieldNames); + } + + private QueryBuilder randomSimpleQuery() { + return randomSimpleQuery(randomAllowedField()); + } + + private QueryBuilder randomSimpleQuery(String fieldName) { + return randomFrom( + QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)), + QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))), + QueryBuilders.prefixQuery(fieldName, randomAlphaOfLength(randomIntBetween(3, 10))), + QueryBuilders.wildcardQuery(fieldName, "*" + randomAlphaOfLength(randomIntBetween(3, 10))), + QueryBuilders.matchAllQuery(), + QueryBuilders.existsQuery(fieldName) + ); + } +} diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java index 0de11109e33e7..d940f366ef942 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/InternalGeoLine.java @@ -88,7 +88,7 @@ public InternalGeoLine(StreamInput in) throws IOException { this.includeSorts = in.readBoolean(); this.sortOrder = SortOrder.readFromStream(in); this.size = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { nonOverlapping = in.readBoolean(); simplified = in.readBoolean(); } else { @@ -105,7 +105,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeBoolean(includeSorts); sortOrder.writeTo(out); out.writeVInt(size); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X)) { out.writeBoolean(nonOverlapping); out.writeBoolean(simplified); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java index f667ae4b80d03..7ad54901e2d06 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.fieldcaps.TransportFieldCapabilitiesAction; import org.elasticsearch.action.support.ActionFilter; @@ -271,7 +270,7 @@ protected TaskId cancelTaskWithXOpaqueId(String id, String action) { TaskId taskId = findTaskWithXOpaqueId(id, action); assertNotNull(taskId); logger.trace("Cancelling task " + taskId); - CancelTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); + ListTasksResponse response = clusterAdmin().prepareCancelTasks().setTargetTaskId(taskId).get(); assertThat(response.getTasks(), hasSize(1)); 
assertThat(response.getTasks().get(0).action(), equalTo(action)); logger.trace("Task is cancelled " + taskId); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java index 36a42aaad7161..8fa41017762a7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java @@ -156,14 +156,12 @@ static void handle( logSearchResponse(response, log); } - SearchHit[] hits = response.getHits().getHits(); - SearchHitRowSet rowSet = makeRowSet.get(); if (rowSet.hasRemaining() == false) { closePointInTime(client, response.pointInTimeId(), listener.delegateFailureAndWrap((l, r) -> l.onResponse(Page.last(rowSet)))); } else { - updateSearchAfter(hits, source); + updateSearchAfter(response.getHits().getHits(), source); SearchHitCursor nextCursor = new SearchHitCursor( source, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index ba6a9854e4254..b6e3e8b759352 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -28,9 +28,8 @@ * Extracts rows from an array of {@link SearchHit}. 
*/ class SearchHitRowSet extends ResultRowSet { - private final SearchHit[] hits; + private final SearchHits hits; private final Map> flatInnerHits = new HashMap<>(); - private final Set innerHits = new LinkedHashSet<>(); private final String innerHit; private final int size; @@ -42,13 +41,14 @@ class SearchHitRowSet extends ResultRowSet { SearchHitRowSet(List exts, BitSet mask, int sizeRequested, int limit, SearchResponse response) { super(exts, mask); - this.hits = response.getHits().getHits(); + this.hits = response.getHits().asUnpooled(); // Since the results might contain nested docs, the iteration is similar to that of Aggregation // namely it discovers the nested docs and then, for iteration, increments the deepest level first // and eventually carries that over to the top level String innerHit = null; + Set innerHits = new LinkedHashSet<>(); for (HitExtractor ex : exts) { if (ex.hitName() != null) { innerHits.add(ex.hitName()); @@ -58,7 +58,7 @@ class SearchHitRowSet extends ResultRowSet { } } - int sz = hits.length; + int sz = hits.getHits().length; int maxDepth = 0; if (innerHits.isEmpty() == false) { @@ -106,7 +106,7 @@ protected Object extractValue(HitExtractor e) { int extractorLevel = e.hitName() == null ? 
0 : 1; SearchHit hit = null; - SearchHit[] sh = hits; + SearchHit[] sh = hits.getHits(); for (int lvl = 0; lvl <= extractorLevel; lvl++) { // TODO: add support for multi-nested doc if (hit != null) { @@ -172,7 +172,7 @@ protected boolean doNext() { // increment last row indexPerLevel[indexPerLevel.length - 1]++; // then check size - SearchHit[] sh = hits; + SearchHit[] sh = hits.getHits(); for (int lvl = 0; lvl < indexPerLevel.length; lvl++) { if (indexPerLevel[lvl] == sh.length) { // reset the current branch @@ -181,7 +181,7 @@ protected boolean doNext() { indexPerLevel[lvl - 1]++; // restart the loop lvl = 0; - sh = hits; + sh = hits.getHits(); } else { SearchHit h = sh[indexPerLevel[lvl]]; // TODO: improve this for multi-nested responses diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java index d06a239e61ce7..112be29d2dcd8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingExtractorTests.java @@ -82,7 +82,7 @@ public void testGet() { double value = randomDouble(); double expected = Math.log(value); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); assertEquals(expected, extractor.process(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index 5c3fc378d90c1..b951f96e8b933 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -94,7 +94,7 @@ public void testGetDottedValueWithDocValues() { } DocumentField field = new DocumentField(fieldName, documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0); assertEquals(result, extractor.extract(hit)); @@ -112,7 +112,7 @@ public void testGetDocValue() { documentFieldValues.add(randomValue()); } DocumentField field = new DocumentField(fieldName, documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0); assertEquals(result, extractor.extract(hit)); @@ -127,7 +127,7 @@ public void testGetDate() { ZonedDateTime zdt = DateUtils.asDateTimeWithMillis(millis, zoneId).plusNanos(nanosOnly); List documentFieldValues = Collections.singletonList(StringUtils.toString(zdt)); DocumentField field = new DocumentField("my_date_nanos_field", documentFieldValues); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("my_date_nanos_field", field); FieldHitExtractor extractor = new FieldHitExtractor("my_date_nanos_field", DATETIME, zoneId, LENIENT); assertEquals(zdt, extractor.extract(hit)); @@ -144,7 +144,7 @@ public void testMultiValuedDocValue() { String fieldName = randomAlphaOfLength(5); FieldHitExtractor fe = getFieldHitExtractor(fieldName); DocumentField field = new DocumentField(fieldName, asList("a", "b")); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); 
hit.setDocumentField(fieldName, field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); @@ -154,7 +154,7 @@ public void testExtractSourcePath() { FieldHitExtractor fe = getFieldHitExtractor("a.b.c"); Object value = randomValue(); DocumentField field = new DocumentField("a.b.c", singletonList(value)); - SearchHit hit = new SearchHit(1, null, null); + SearchHit hit = SearchHit.unpooled(1, null, null); hit.setDocumentField("a.b.c", field); assertThat(fe.extract(hit), is(value)); } @@ -163,7 +163,7 @@ public void testMultiValuedSource() { FieldHitExtractor fe = getFieldHitExtractor("a"); Object value = randomValue(); DocumentField field = new DocumentField("a", asList(value, value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("a", field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); @@ -174,7 +174,7 @@ public void testMultiValuedSourceAllowed() { Object valueA = randomValue(); Object valueB = randomValue(); DocumentField field = new DocumentField("a", asList(valueA, valueB)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField("a", field); assertEquals(valueA, fe.extract(hit)); } @@ -187,7 +187,7 @@ public void testGeoShapeExtraction() { map.put("coordinates", asList(1d, 2d)); map.put("type", "Point"); DocumentField field = new DocumentField(fieldName, singletonList(map)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); assertEquals(new GeoShape(1, 2), fe.extract(hit)); @@ -204,14 +204,14 @@ public void testMultipleGeoShapeExtraction() { map2.put("coordinates", asList(3d, 4d)); map2.put("type", "Point"); DocumentField 
field = new DocumentField(fieldName, asList(map1, map2)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); Exception ex = expectThrows(InvalidArgumentException.class, () -> fe.extract(hit)); assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); FieldHitExtractor lenientFe = new FieldHitExtractor(fieldName, randomBoolean() ? GEO_SHAPE : SHAPE, UTC, LENIENT); - SearchHit searchHit = new SearchHit(1, "1"); + SearchHit searchHit = SearchHit.unpooled(1, "1"); searchHit.setDocumentField(fieldName, new DocumentField(fieldName, singletonList(map2))); assertEquals(new GeoShape(3, 4), lenientFe.extract(searchHit)); } @@ -223,7 +223,7 @@ public void testUnsignedLongExtraction() { String fieldName = randomAlphaOfLength(10); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); FieldHitExtractor fe = new FieldHitExtractor(fieldName, UNSIGNED_LONG, randomZone(), randomBoolean() ? NONE : LENIENT); @@ -237,7 +237,7 @@ public void testVersionExtraction() { String fieldName = randomAlphaOfLength(10); DocumentField field = new DocumentField(fieldName, singletonList(value)); - SearchHit hit = new SearchHit(1, null); + SearchHit hit = SearchHit.unpooled(1, null); hit.setDocumentField(fieldName, field); FieldHitExtractor fe = new FieldHitExtractor(fieldName, VERSION, randomZone(), randomBoolean() ? 
NONE : LENIENT); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java index fdce6cbcf0c2f..5d007218aeeb1 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractorTests.java @@ -14,7 +14,7 @@ public void testGet() { int times = between(1, 1000); for (int i = 0; i < times; i++) { float score = randomFloat(); - SearchHit hit = new SearchHit(1); + SearchHit hit = SearchHit.unpooled(1); hit.score(score); assertEquals(score, ScoreExtractor.INSTANCE.extract(hit)); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java index b7f123f82cf98..9e83df706a77b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractorTests.java @@ -115,7 +115,7 @@ public void testExtractUnsignedLong() { private SearchHits searchHitsOf(Object value) { TotalHits totalHits = new TotalHits(10, TotalHits.Relation.EQUAL_TO); - SearchHit searchHit = new SearchHit(1, "docId"); + SearchHit searchHit = SearchHit.unpooled(1, "docId"); searchHit.addDocumentFields( Collections.singletonMap("topHitsAgg", new DocumentField("field", Collections.singletonList(value))), Collections.singletonMap( @@ -123,6 +123,6 @@ private SearchHits searchHitsOf(Object value) { new DocumentField("_ignored", Collections.singletonList(randomValueOtherThan(value, () -> randomAlphaOfLength(5)))) ) ); - 
return new SearchHits(new SearchHit[] { searchHit }, totalHits, 0.0f); + return SearchHits.unpooled(new SearchHit[] { searchHit }, totalHits, 0.0f); } } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml new file mode 100644 index 0000000000000..0f8dbbb97f57f --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/110_all_null.yml @@ -0,0 +1,263 @@ +--- +setup: + - skip: + version: " - 8.12.99" + reason: "feature added in 8.13" + + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 5 + mappings: + properties: + data: + type: long + data_d: + type: double + count: + type: long + count_d: + type: double + time: + type: long + color: + type: keyword + always_null: + type: keyword + non_null_out_of_match: + type: keyword + - do: + bulk: + index: "test" + refresh: true + body: + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275187, "color": "red" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275188, "color": "blue" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275189, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275190, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275191, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275192, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275193, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, 
"count": 46, "data_d": 2, "count_d": 46, "time": 1674835275194, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275195, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275196, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275197, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275198, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275199, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275200, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275201, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275202, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275203, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275204, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275205, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275206, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 
1674835275207, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275208, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275209, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275210, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275211, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275212, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275213, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275214, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275215, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275216, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275217, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275218, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275219, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275220, "color": "blue", 
"non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275221, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275222, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 40, "data_d": 1, "count_d": 40, "time": 1674835275223, "color": "red", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 42, "data_d": 2, "count_d": 42, "time": 1674835275224, "color": "blue", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 1, "count": 44, "data_d": 1, "count_d": 44, "time": 1674835275225, "color": "green", "non_null_out_of_match": "a" } + - { "index": { } } + - { "data": 2, "count": 46, "data_d": 2, "count_d": 46, "time": 1674835275226, "color": "red", "non_null_out_of_match": "a" } + +--- +row wise and keep null: + - do: + esql.query: + drop_null_columns: false + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: false + + - length: {columns: 8} + - match: {columns.0.name: "always_null"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "count"} + - match: {columns.2.type: "long"} + - match: {columns.3.name: "count_d"} + - match: {columns.3.type: "double"} + - match: {columns.4.name: "data"} + - match: {columns.4.type: "long"} + - match: {columns.5.name: "data_d"} + - match: {columns.5.type: "double"} + - match: {columns.6.name: "non_null_out_of_match"} + - match: {columns.6.type: "keyword"} + - match: {columns.7.name: "time"} + - match: {columns.7.type: "long"} + - length: {values: 2} + - length: {values.0: 8} + - is_false: values.0.0 + - match: {values.0.1: red} + +--- +row wise and drop null: + - do: + esql.query: + drop_null_columns: true + body: + query: 'FROM test | WHERE time <= 
1674835275188 | SORT time ASC | LIMIT 2' + columnar: false + + - length: {all_columns: 8} + - match: {all_columns.0.name: "always_null"} + - match: {all_columns.0.type: "keyword"} + - match: {all_columns.1.name: "color"} + - match: {all_columns.1.type: "keyword"} + - match: {all_columns.2.name: "count"} + - match: {all_columns.2.type: "long"} + - match: {all_columns.3.name: "count_d"} + - match: {all_columns.3.type: "double"} + - match: {all_columns.4.name: "data"} + - match: {all_columns.4.type: "long"} + - match: {all_columns.5.name: "data_d"} + - match: {all_columns.5.type: "double"} + - match: {all_columns.6.name: "non_null_out_of_match"} + - match: {all_columns.6.type: "keyword"} + - match: {all_columns.7.name: "time"} + - match: {all_columns.7.type: "long"} + - length: {columns: 6} + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "count"} + - match: {columns.1.type: "long"} + - match: {columns.2.name: "count_d"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "data"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "data_d"} + - match: {columns.4.type: "double"} + - match: {columns.5.name: "time"} + - match: {columns.5.type: "long"} + - length: {values: 2} + - length: {values.0: 6} + - match: {values.0.0: red} + +--- +columnar and keep null: + - do: + esql.query: + drop_null_columns: false + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: true + + - length: {columns: 8} + - match: {columns.0.name: "always_null"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {columns.2.name: "count"} + - match: {columns.2.type: "long"} + - match: {columns.3.name: "count_d"} + - match: {columns.3.type: "double"} + - match: {columns.4.name: "data"} + - match: {columns.4.type: "long"} + - match: {columns.5.name: "data_d"} + - match: {columns.5.type: "double"} + - 
match: {columns.6.name: "non_null_out_of_match"} + - match: {columns.6.type: "keyword"} + - match: {columns.7.name: "time"} + - match: {columns.7.type: "long"} + - length: {values: 8} + - length: {values.0: 2} + - is_false: values.0.0 + - match: {values.1.0: red} + +--- +columnar and drop null: + - do: + esql.query: + drop_null_columns: true + body: + query: 'FROM test | WHERE time <= 1674835275188 | SORT time ASC | LIMIT 2' + columnar: true + + - length: {all_columns: 8} + - match: {all_columns.0.name: "always_null"} + - match: {all_columns.0.type: "keyword"} + - match: {all_columns.1.name: "color"} + - match: {all_columns.1.type: "keyword"} + - match: {all_columns.2.name: "count"} + - match: {all_columns.2.type: "long"} + - match: {all_columns.3.name: "count_d"} + - match: {all_columns.3.type: "double"} + - match: {all_columns.4.name: "data"} + - match: {all_columns.4.type: "long"} + - match: {all_columns.5.name: "data_d"} + - match: {all_columns.5.type: "double"} + - match: {all_columns.6.name: "non_null_out_of_match"} + - match: {all_columns.6.type: "keyword"} + - match: {all_columns.7.name: "time"} + - match: {all_columns.7.type: "long"} + - length: {columns: 6} + - match: {columns.0.name: "color"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "count"} + - match: {columns.1.type: "long"} + - match: {columns.2.name: "count_d"} + - match: {columns.2.type: "double"} + - match: {columns.3.name: "data"} + - match: {columns.3.type: "long"} + - match: {columns.4.name: "data_d"} + - match: {columns.4.type: "double"} + - match: {columns.5.name: "time"} + - match: {columns.5.type: "long"} + - length: {values: 6} + - length: {values.0: 2} + - match: {values.0.0: red} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml index 06fc2c8a3fa99..8b28776e42fcd 100644 --- 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/40_unsupported_types.yml @@ -142,7 +142,7 @@ unsupported: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - match: { columns.12.name: integer_range } @@ -170,7 +170,7 @@ unsupported: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: some_doc.foo } @@ -191,7 +191,7 @@ unsupported: - match: { values.0.7: null } - match: { values.0.8: "POINT (10.0 12.0)" } - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: null } + - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } @@ -205,7 +205,7 @@ unsupported: - match: { values.0.21: null } - match: { values.0.22: null } - match: { values.0.23: null } - - match: { values.0.24: null } + - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - match: { values.0.25: 12 } - match: { values.0.26: xy } - match: { values.0.27: "foo bar" } @@ -238,7 +238,7 @@ unsupported: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - 
match: { columns.12.name: integer_range } @@ -266,7 +266,7 @@ unsupported: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: some_doc.foo } @@ -282,8 +282,8 @@ unsupported: - do: esql.query: body: - query: 'from test | keep shape | limit 0' - - match: { columns.0.name: shape } + query: 'from test | keep histogram | limit 0' + - match: { columns.0.name: histogram } - match: { columns.0.type: unsupported } - length: { values: 0 } @@ -322,7 +322,7 @@ unsupported with sort: - match: { columns.9.name: geo_point_alias } - match: { columns.9.type: geo_point } - match: { columns.10.name: geo_shape } - - match: { columns.10.type: unsupported } + - match: { columns.10.type: geo_shape } - match: { columns.11.name: histogram } - match: { columns.11.type: unsupported } - match: { columns.12.name: integer_range } @@ -350,7 +350,7 @@ unsupported with sort: - match: { columns.23.name: search_as_you_type._index_prefix } - match: { columns.23.type: unsupported } - match: { columns.24.name: shape } - - match: { columns.24.type: unsupported } + - match: { columns.24.type: cartesian_shape } - match: { columns.25.name: some_doc.bar } - match: { columns.25.type: long } - match: { columns.26.name: some_doc.foo } @@ -371,7 +371,7 @@ unsupported with sort: - match: { values.0.7: null } - match: { values.0.8: "POINT (10.0 12.0)" } - match: { values.0.9: "POINT (10.0 12.0)" } - - match: { values.0.10: null } + - match: { values.0.10: "LINESTRING (-97.154 25.996, -97.159 25.998, -97.181 25.991, -97.187 25.985)" } - match: { values.0.11: null } - match: { values.0.12: null } - match: { values.0.13: null } @@ -385,7 +385,7 @@ unsupported with sort: - match: { values.0.21: null } - match: { values.0.22: null } - 
match: { values.0.23: null } - - match: { values.0.24: null } + - match: { values.0.24: "LINESTRING (-377.03653 389.897676, -377.009051 389.889939)" } - match: { values.0.25: 12 } - match: { values.0.26: xy } - match: { values.0.27: "foo bar" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml index fdccf473b358a..69b676c92ed72 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/3rd_party_deployment.yml @@ -218,6 +218,9 @@ setup: --- "Test start deployment fails while model download in progress": + - skip: + features: fips_140 + reason: "@AwaitsFix https://github.com/elastic/elasticsearch/issues/104414" - do: ml.put_trained_model: model_id: .elser_model_2 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 5adbf782f3236..8bc863e6fca9f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -1,10 +1,8 @@ --- setup: - skip: - version: all - reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/104038 - # version: " - 8.12.99" - # reason: "Universal Profiling test infrastructure is available in 8.12+" + version: " - 8.12.99" + reason: "Universal Profiling test infrastructure is available in 8.12+" - do: cluster.put_settings: diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index 843dee43706f8..1d44ed5a1f8ef 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.TransportDeleteIndexAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; @@ -27,6 +26,7 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -848,7 +848,7 @@ public void refresh(ActionListener listener) { client.threadPool().getThreadContext(), TRANSFORM_ORIGIN, new RefreshRequest(TransformInternalIndexConstants.LATEST_INDEX_NAME), - ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure), + ActionListener.wrap(r -> listener.onResponse(true), listener::onFailure), client.admin().indices()::refresh ); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java index d76b6b67368f9..69139bc3f7561 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java +++ 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/persistence/SeqNoPrimaryTermAndIndexTests.java @@ -29,7 +29,7 @@ public void testEquals() { } public void testFromSearchHit() { - SearchHit searchHit = new SearchHit(1); + SearchHit searchHit = SearchHit.unpooled(1); long seqNo = randomLongBetween(-2, 10_000); long primaryTerm = randomLongBetween(-2, 10_000); String index = randomAlphaOfLength(10); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 8ee7e902285c9..fa8e867d77a49 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -544,7 +544,11 @@ protected void ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled( + new SearchHit[] { SearchHit.unpooled(1) }, + new TotalHits(1L, TotalHits.Relation.EQUAL_TO), + 1.0f + ), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index 5dee74cccee7a..a18c926e21da6 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -513,7 +513,7 @@ public void 
testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), @@ -606,7 +606,7 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), @@ -702,7 +702,7 @@ public void testFailureCounterIsResetOnSuccess() throws Exception { ); final SearchResponse searchResponse = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java index 708cb3d93cbed..512fd7a2383a1 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; @@ -112,7 +113,7 @@ public void testTermsFieldCollector() throws IOException { Aggregations aggs = new Aggregations(Collections.singletonList(composite)); SearchResponse response = new SearchResponse( - null, + SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, null, false, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java index dab6d8518d28f..fd4e60e485200 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; @@ -171,7 +172,7 @@ private static QueryBuilder buildFilterQuery(ChangeCollector collector) { private static SearchResponse buildSearchResponse(SingleValue minTimestamp, SingleValue maxTimestamp) { return new SearchResponse( - null, + 
SearchHits.EMPTY_WITH_TOTAL_HITS, new Aggregations(Arrays.asList(minTimestamp, maxTimestamp)), null, false, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index 67f923769ffe3..be0bb177267bc 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; @@ -19,6 +20,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.search.SearchHits; @@ -39,6 +41,8 @@ import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfigTests; import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; +import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; +import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfigTests; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; @@ -239,7 +243,30 @@ public 
void testProcessSearchResponse() { SettingsConfigTests.randomSettingsConfig(), TransformConfigVersion.CURRENT, Collections.emptySet() - ); + ) { + @Override + public Tuple, Map> processSearchResponse( + SearchResponse searchResponse, + String destinationIndex, + String destinationPipeline, + Map fieldTypeMap, + TransformIndexerStats stats, + TransformProgress progress + ) { + try { + return super.processSearchResponse( + searchResponse, + destinationIndex, + destinationPipeline, + fieldTypeMap, + stats, + progress + ); + } finally { + searchResponse.decRef(); + } + } + }; Aggregations aggs = null; assertThat(pivot.processSearchResponse(searchResponseFromAggs(aggs), null, null, null, null, null), is(nullValue())); @@ -324,7 +351,22 @@ public void testPreviewForCompositeAggregation() throws Exception { } private static SearchResponse searchResponseFromAggs(Aggregations aggs) { - return new SearchResponse(null, aggs, null, false, null, null, 1, null, 10, 5, 0, 0, new ShardSearchFailure[0], null); + return new SearchResponse( + SearchHits.EMPTY_WITH_TOTAL_HITS, + aggs, + null, + false, + null, + null, + 1, + null, + 10, + 5, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ); } private class MyMockClient extends NoOpClient { diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index f02b3f865adf0..d97b0bd81a101 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -100,12 +100,12 @@ public void testExecuteWithAggs() { public void testExecuteAccessHits() throws Exception { CompareCondition condition = new 
CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1, Clock.systemUTC()); - SearchHit hit = new SearchHit(0, "1"); + SearchHit hit = SearchHit.unpooled(0, "1"); hit.score(1f); hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); SearchResponse response = new SearchResponse( - new SearchHits(new SearchHit[] { hit }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), null, null, false, diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index b82622fbd4819..67835971cd15a 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -180,7 +180,7 @@ public void testActionConditionWithFailures() throws Exception { final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - searchHitReference.set(response.getHits().getAt(0)); + searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); } @@ -240,7 +240,7 @@ public void testActionCondition() throws Exception { final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); - searchHitReference.set(response.getHits().getAt(0)); + searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); } diff --git 
a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index d1153b6eca3e6..265b252082c68 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.watcher.test.integration; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse; import org.elasticsearch.rest.RestStatus; @@ -65,7 +65,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { ensureGreen(HistoryStoreField.DATA_STREAM); assertBusy(() -> { - RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(".watcher-history*").get(); + BroadcastResponse refreshResponse = indicesAdmin().prepareRefresh(".watcher-history*").get(); assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index ea9295600fe41..a067b99c6bff0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -10,11 +10,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchException; 
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -404,7 +404,7 @@ private Collection loadWatches(ClusterState clusterState) { // Non private for unit testing purposes void refreshWatches(IndexMetadata indexMetadata) { - RefreshResponse refreshResponse = client.admin() + BroadcastResponse refreshResponse = client.admin() .indices() .refresh(new RefreshRequest(INDEX)) .actionGet(TimeValue.timeValueSeconds(5)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index c2ed68d8fa1bd..19bac967c576a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; @@ -21,6 +20,7 @@ import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportSearchAction; 
import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -162,12 +162,12 @@ void stopExecutor() {} ClusterState clusterState = csBuilder.build(); // response setup, successful refresh response - RefreshResponse refreshResponse = mock(RefreshResponse.class); + BroadcastResponse refreshResponse = mock(BroadcastResponse.class); when(refreshResponse.getSuccessfulShards()).thenReturn( clusterState.getMetadata().getIndices().get(Watch.INDEX).getNumberOfShards() ); doAnswer(invocation -> { - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onResponse(refreshResponse); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(RefreshRequest.class), anyActionListener()); @@ -196,7 +196,7 @@ void stopExecutor() {} SearchHit[] hits = new SearchHit[count]; for (int i = 0; i < count; i++) { String id = String.valueOf(i); - SearchHit hit = new SearchHit(1, id); + SearchHit hit = SearchHit.unpooled(1, id); hit.version(1L); hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever")); hits[i] = hit; @@ -212,7 +212,7 @@ void stopExecutor() {} when(watch.status()).thenReturn(watchStatus); when(parser.parse(eq(id), eq(true), any(), eq(XContentType.JSON), anyLong(), anyLong())).thenReturn(watch); } - SearchHits searchHits = new SearchHits(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); + SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[2]; ActionListener.respondAndRelease( diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 60fa2581b4218..b75ac51c3510f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkProcessor2; @@ -24,6 +23,7 @@ import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; +import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -202,7 +202,7 @@ public void testFindTriggeredWatchesGoodCase() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onResponse(mockRefreshResponse(1, 1)); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -210,14 +210,14 @@ public void testFindTriggeredWatchesGoodCase() { SearchResponse searchResponse1 = mock(SearchResponse.class); when(searchResponse1.getSuccessfulShards()).thenReturn(1); when(searchResponse1.getTotalShards()).thenReturn(1); - final BytesArray 
source = new BytesArray("{}"); + BytesArray source = new BytesArray("{}"); { - final SearchHit hit = new SearchHit(0, "first_foo"); + SearchHit hit = SearchHit.unpooled(0, "first_foo"); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); hit.sourceRef(source); when(searchResponse1.getHits()).thenReturn( - new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f) + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f) ); } when(searchResponse1.getScrollId()).thenReturn("_scrollId"); @@ -228,20 +228,20 @@ public void testFindTriggeredWatchesGoodCase() { return null; }).when(client).execute(eq(TransportSearchAction.TYPE), any(), any()); - // First return a scroll response with a single hit and then with no hits doAnswer(invocation -> { SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[1]; @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[2]; if (request.scrollId().equals("_scrollId")) { - final var hit2 = new SearchHit(0, "second_foo"); - hit2.version(1L); - hit2.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); - hit2.sourceRef(source); + // First return a scroll response with a single hit and then with no hits + var hit = SearchHit.unpooled(0, "second_foo"); + hit.version(1L); + hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); + hit.sourceRef(source); ActionListener.respondAndRelease( listener, new SearchResponse( - new SearchHits(new SearchHit[] { hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f), null, null, false, @@ -409,7 +409,7 @@ public void testIndexNotFoundButInMetadata() { doAnswer(invocation -> { @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocation.getArguments()[2]; + 
ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(new IndexNotFoundException(TriggeredWatchStoreField.INDEX_NAME)); return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); @@ -507,8 +507,8 @@ public void testDeleteTriggeredWatches() throws Exception { assertThat(response.getItems().length, is(1)); } - private RefreshResponse mockRefreshResponse(int total, int successful) { - RefreshResponse refreshResponse = mock(RefreshResponse.class); + private BroadcastResponse mockRefreshResponse(int total, int successful) { + BroadcastResponse refreshResponse = mock(BroadcastResponse.class); when(refreshResponse.getTotalShards()).thenReturn(total); when(refreshResponse.getSuccessfulShards()).thenReturn(successful); return refreshResponse;